##// END OF EJS Templates
exchange: correctly specify url to unbundle (issue5145)...
Augie Fackler -
r29704:b8f9cdca stable
parent child Browse files
Show More
@@ -1,1932 +1,1933 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 base85,
19 base85,
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 tags,
32 tags,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
# Aliases for util's urllib compatibility shims (py2/py3 urllib split).
urlerr = util.urlerr
urlreq = util.urlreq
39
39
40 # Maps bundle compression human names to internal representation.
40 # Maps bundle compression human names to internal representation.
41 _bundlespeccompressions = {'none': None,
41 _bundlespeccompressions = {'none': None,
42 'bzip2': 'BZ',
42 'bzip2': 'BZ',
43 'gzip': 'GZ',
43 'gzip': 'GZ',
44 }
44 }
45
45
46 # Maps bundle version human names to changegroup versions.
46 # Maps bundle version human names to changegroup versions.
47 _bundlespeccgversions = {'v1': '01',
47 _bundlespeccgversions = {'v1': '01',
48 'v2': '02',
48 'v2': '02',
49 'packed1': 's1',
49 'packed1': 's1',
50 'bundle2': '02', #legacy
50 'bundle2': '02', #legacy
51 }
51 }
52
52
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpretted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(text):
        # Split "<version>;k0=v0;k1=v1" into the version string and a dict
        # of URI-decoded parameters; no ";" means no parameters at all.
        if ';' not in text:
            return text, {}

        version, paramstr = text.split(';', 1)

        params = {}
        for pair in paramstr.split(';'):
            if '=' not in pair:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % pair)

            key, value = pair.split('=', 1)
            params[urlreq.unquote(key)] = urlreq.unquote(value)

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in _bundlespeccompressions:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in _bundlespeccompressions:
            # Only a compression given; pick the newest version the repo's
            # storage format allows.
            compression = spec
            version = 'v1'
            if 'generaldelta' in repo.requirements:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            # Only a version given; fill in the conventional compression.
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        compression = _bundlespeccompressions[compression]
        version = _bundlespeccgversions[version]
    return compression, version, params
163
163
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of a bundle stream and return a matching unbundler.

    Returns a cg1unpacker, a bundle2 unbundler, or a streamcloneapplier
    depending on the magic/version found in the first four bytes. Raises
    Abort for anything unrecognized.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream of '\0'-leading data is a raw, uncompressed
        # changegroup: splice a synthetic HG10UN header back on.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        # bundle1: a two-byte compression marker follows the header unless
        # the headerless fixup above already pinned it to 'UN'.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
191
191
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Raises Abort when the compression algorithm or changegroup version of
    the bundle cannot be mapped back to a known bundlespec.
    """
    def speccompression(alg):
        # Reverse-map an internal compression id (e.g. 'BZ') to its human
        # name (e.g. 'bzip2'); returns None when unrecognized.
        for k, v in _bundlespeccompressions.items():
            if v == alg:
                return k
        return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # Headerless bzip2 streams lose their leading magic; they are
            # still plain 'BZ' for bundlespec purposes.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                # Bug fix: report the unrecognized algorithm name, not the
                # None returned by the failed speccompression() lookup.
                raise error.Abort(_('unknown compression algorithm: %s')
                                  % b.params['Compression'])
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
244
244
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None

    # Encode with the newest obsmarker format both sides understand.
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler does not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
259
259
260 def _canusebundle2(op):
260 def _canusebundle2(op):
261 """return true if a pull/push can use bundle2
261 """return true if a pull/push can use bundle2
262
262
263 Feel free to nuke this function when we drop the experimental option"""
263 Feel free to nuke this function when we drop the experimental option"""
264 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
264 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
265 and op.remote.capable('bundle2'))
265 and op.remote.capable('bundle2'))
266
266
267
267
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of message used when pushing bookmark
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
379
379
380
380
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    # Refuse early when the destination lacks repository features we need.
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if _canusebundle2(pushop) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            # A transaction is only opened when we hold the local lock, so
            # a bundle2 reply can be applied atomically.
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # Legacy addchangegroup path: we must lock the remote ourselves.
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _canusebundle2(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
            if pushop.trmanager:
                pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
465
465
466 # list of steps to perform discovery before push
466 # list of steps to perform discovery before push
467 pushdiscoveryorder = []
467 pushdiscoveryorder = []
468
468
469 # Mapping between step name and function
469 # Mapping between step name and function
470 #
470 #
471 # This exists to help extensions wrap steps if necessary
471 # This exists to help extensions wrap steps if necessary
472 pushdiscoverymapping = {}
472 pushdiscoverymapping = {}
473
473
474 def pushdiscovery(stepname):
474 def pushdiscovery(stepname):
475 """decorator for function performing discovery before push
475 """decorator for function performing discovery before push
476
476
477 The function is added to the step -> function mapping and appended to the
477 The function is added to the step -> function mapping and appended to the
478 list of steps. Beware that decorated function will be added in order (this
478 list of steps. Beware that decorated function will be added in order (this
479 may matter).
479 may matter).
480
480
481 You can only use this decorator for a new step, if you want to wrap a step
481 You can only use this decorator for a new step, if you want to wrap a step
482 from an extension, change the pushdiscovery dictionary directly."""
482 from an extension, change the pushdiscovery dictionary directly."""
483 def dec(func):
483 def dec(func):
484 assert stepname not in pushdiscoverymapping
484 assert stepname not in pushdiscoverymapping
485 pushdiscoverymapping[stepname] = func
485 pushdiscoverymapping[stepname] = func
486 pushdiscoveryorder.append(stepname)
486 pushdiscoveryorder.append(stepname)
487 return func
487 return func
488 return dec
488 return dec
489
489
def _pushdiscovery(pushop):
    """Invoke every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
495
495
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
508
508
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    # phase information advertised by the remote through pushkey
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    # '%%ln' is escaped so the later unfi.set() call performs the actual
    # '%ln' substitution; only extracond is interpolated here.
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # heads to turn public if the push succeeds / if it fails
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
557
557
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect obsolescence markers relevant to the changesets being pushed"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
568
568
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks to decide which updates to push

    Fills ``pushop.outbookmarks`` with ``(name, old, new)`` triplets and
    sets ``pushop.bkresult`` to 2 when an explicitly requested bookmark
    exists on neither side."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to ancestors of the pushed revisions
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
    # bookmarks that advanced locally: fast-forward on the remote
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in advdst + diverge + differ:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
620
620
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False when there is nothing to push; aborts when an outgoing
    head is obsolete or troubled (unless --force); otherwise delegates the
    remote-head race check to ``discovery.checkheads`` and returns True."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
651
651
# Names of part-generation steps for an outgoing bundle2; order matters.
b2partsgenorder = []

# step name -> part-generating function
#
# Kept public so extensions can wrap individual steps if necessary.
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part-generation step

    The decorated function is recorded in ``b2partsgenmapping``; the step
    name is appended to ``b2partsgenorder``, or inserted at position
    ``idx`` when one is given, so registration order drives generation
    order (this may matter).

    Only use this decorator for brand new steps; to wrap an existing step
    from an extension, modify the ``b2partsgenmapping`` dictionary
    directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
678
678
679 def _pushb2ctxcheckheads(pushop, bundler):
679 def _pushb2ctxcheckheads(pushop, bundler):
680 """Generate race condition checking parts
680 """Generate race condition checking parts
681
681
682 Exists as an independent function to aid extensions
682 Exists as an independent function to aid extensions
683 """
683 """
684 if not pushop.force:
684 if not pushop.force:
685 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
685 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
686
686
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version: highest version supported by both
    # sides, defaulting to '01' when the remote advertises none.
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
727
727
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Emits one pushkey part per head to turn public; replies and failures
    are routed back through ``handlereply`` / ``pushop.pkfailcb``."""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not 'pushkey' in b2caps:
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, used to map replies/failures back to nodes
    part2node = []

    def handlefailure(pushop, exc):
        # called when the server rejected the pushkey part ``exc.partid``
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # warn for every head the server ignored or failed to publish
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
768
768
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the outgoing bundle2"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format understood by both sides
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
780
780
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triplets for reply/failure routing
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # called when the server rejected the pushkey part ``exc.partid``
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty old -> creation; empty new -> deletion; otherwise move
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
832
832
833
833
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator, collecting their reply handlers
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            # issue5145: pass the remote's url (not a literal 'push') so
            # the url argument of unbundle is correct
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            raise error.Abort(_('push failed on remote'), hint=exc.hint)
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # let the part that generated the pushkey handle its own failure
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
879
880
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
926
927
927 def _pushsyncphase(pushop):
928 def _pushsyncphase(pushop):
928 """synchronise phase information locally and remotely"""
929 """synchronise phase information locally and remotely"""
929 cheads = pushop.commonheads
930 cheads = pushop.commonheads
930 # even when we don't push, exchanging phase data is useful
931 # even when we don't push, exchanging phase data is useful
931 remotephases = pushop.remote.listkeys('phases')
932 remotephases = pushop.remote.listkeys('phases')
932 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
933 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
933 and remotephases # server supports phases
934 and remotephases # server supports phases
934 and pushop.cgresult is None # nothing was pushed
935 and pushop.cgresult is None # nothing was pushed
935 and remotephases.get('publishing', False)):
936 and remotephases.get('publishing', False)):
936 # When:
937 # When:
937 # - this is a subrepo push
938 # - this is a subrepo push
938 # - and remote support phase
939 # - and remote support phase
939 # - and no changeset was pushed
940 # - and no changeset was pushed
940 # - and remote is publishing
941 # - and remote is publishing
941 # We may be in issue 3871 case!
942 # We may be in issue 3871 case!
942 # We drop the possible phase synchronisation done by
943 # We drop the possible phase synchronisation done by
943 # courtesy to publish changesets possibly locally draft
944 # courtesy to publish changesets possibly locally draft
944 # on the remote.
945 # on the remote.
945 remotephases = {'publishing': 'True'}
946 remotephases = {'publishing': 'True'}
946 if not remotephases: # old server or public only reply from non-publishing
947 if not remotephases: # old server or public only reply from non-publishing
947 _localphasemove(pushop, cheads)
948 _localphasemove(pushop, cheads)
948 # don't push any phase data as there is nothing to push
949 # don't push any phase data as there is nothing to push
949 else:
950 else:
950 ana = phases.analyzeremotephases(pushop.repo, cheads,
951 ana = phases.analyzeremotephases(pushop.repo, cheads,
951 remotephases)
952 remotephases)
952 pheads, droots = ana
953 pheads, droots = ana
953 ### Apply remote phase on local
954 ### Apply remote phase on local
954 if remotephases.get('publishing', False):
955 if remotephases.get('publishing', False):
955 _localphasemove(pushop, cheads)
956 _localphasemove(pushop, cheads)
956 else: # publish = False
957 else: # publish = False
957 _localphasemove(pushop, pheads)
958 _localphasemove(pushop, pheads)
958 _localphasemove(pushop, cheads, phases.draft)
959 _localphasemove(pushop, cheads, phases.draft)
959 ### Apply local phase on remote
960 ### Apply local phase on remote
960
961
961 if pushop.cgresult:
962 if pushop.cgresult:
962 if 'phases' in pushop.stepsdone:
963 if 'phases' in pushop.stepsdone:
963 # phases already pushed though bundle2
964 # phases already pushed though bundle2
964 return
965 return
965 outdated = pushop.outdatedphases
966 outdated = pushop.outdatedphases
966 else:
967 else:
967 outdated = pushop.fallbackoutdatedphases
968 outdated = pushop.fallbackoutdatedphases
968
969
969 pushop.stepsdone.add('phases')
970 pushop.stepsdone.add('phases')
970
971
971 # filter heads already turned public by the push
972 # filter heads already turned public by the push
972 outdated = [c for c in outdated if c.node() not in pheads]
973 outdated = [c for c in outdated if c.node() not in pheads]
973 # fallback to independent pushkey command
974 # fallback to independent pushkey command
974 for newremotehead in outdated:
975 for newremotehead in outdated:
975 r = pushop.remote.pushkey('phases',
976 r = pushop.remote.pushkey('phases',
976 newremotehead.hex(),
977 newremotehead.hex(),
977 str(phases.draft),
978 str(phases.draft),
978 str(phases.public))
979 str(phases.public))
979 if not r:
980 if not r:
980 pushop.ui.warn(_('updating %s to public failed!\n')
981 pushop.ui.warn(_('updating %s to public failed!\n')
981 % newremotehead)
982 % newremotehead)
982
983
983 def _localphasemove(pushop, nodes, phase=phases.public):
984 def _localphasemove(pushop, nodes, phase=phases.public):
984 """move <nodes> to <phase> in the local source repo"""
985 """move <nodes> to <phase> in the local source repo"""
985 if pushop.trmanager:
986 if pushop.trmanager:
986 phases.advanceboundary(pushop.repo,
987 phases.advanceboundary(pushop.repo,
987 pushop.trmanager.transaction(),
988 pushop.trmanager.transaction(),
988 phase,
989 phase,
989 nodes)
990 nodes)
990 else:
991 else:
991 # repo is not locked, do not change any phases!
992 # repo is not locked, do not change any phases!
992 # Informs the user that phases should have been moved when
993 # Informs the user that phases should have been moved when
993 # applicable.
994 # applicable.
994 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
995 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
995 phasestr = phases.phasenames[phase]
996 phasestr = phases.phasenames[phase]
996 if actualmoves:
997 if actualmoves:
997 pushop.ui.status(_('cannot lock source repo, skipping '
998 pushop.ui.status(_('cannot lock source repo, skipping '
998 'local %s phase update\n') % phasestr)
999 'local %s phase update\n') % phasestr)
999
1000
1000 def _pushobsolete(pushop):
1001 def _pushobsolete(pushop):
1001 """utility function to push obsolete markers to a remote"""
1002 """utility function to push obsolete markers to a remote"""
1002 if 'obsmarkers' in pushop.stepsdone:
1003 if 'obsmarkers' in pushop.stepsdone:
1003 return
1004 return
1004 repo = pushop.repo
1005 repo = pushop.repo
1005 remote = pushop.remote
1006 remote = pushop.remote
1006 pushop.stepsdone.add('obsmarkers')
1007 pushop.stepsdone.add('obsmarkers')
1007 if pushop.outobsmarkers:
1008 if pushop.outobsmarkers:
1008 pushop.ui.debug('try to push obsolete markers to remote\n')
1009 pushop.ui.debug('try to push obsolete markers to remote\n')
1009 rslts = []
1010 rslts = []
1010 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1011 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1011 for key in sorted(remotedata, reverse=True):
1012 for key in sorted(remotedata, reverse=True):
1012 # reverse sort to ensure we end with dump0
1013 # reverse sort to ensure we end with dump0
1013 data = remotedata[key]
1014 data = remotedata[key]
1014 rslts.append(remote.pushkey('obsolete', key, '', data))
1015 rslts.append(remote.pushkey('obsolete', key, '', data))
1015 if [r for r in rslts if not r]:
1016 if [r for r in rslts if not r]:
1016 msg = _('failed to push some obsolete markers!\n')
1017 msg = _('failed to push some obsolete markers!\n')
1017 repo.ui.warn(msg)
1018 repo.ui.warn(msg)
1018
1019
1019 def _pushbookmark(pushop):
1020 def _pushbookmark(pushop):
1020 """Update bookmark position on remote"""
1021 """Update bookmark position on remote"""
1021 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1022 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1022 return
1023 return
1023 pushop.stepsdone.add('bookmarks')
1024 pushop.stepsdone.add('bookmarks')
1024 ui = pushop.ui
1025 ui = pushop.ui
1025 remote = pushop.remote
1026 remote = pushop.remote
1026
1027
1027 for b, old, new in pushop.outbookmarks:
1028 for b, old, new in pushop.outbookmarks:
1028 action = 'update'
1029 action = 'update'
1029 if not old:
1030 if not old:
1030 action = 'export'
1031 action = 'export'
1031 elif not new:
1032 elif not new:
1032 action = 'delete'
1033 action = 'delete'
1033 if remote.pushkey('bookmarks', b, old, new):
1034 if remote.pushkey('bookmarks', b, old, new):
1034 ui.status(bookmsgmap[action][0] % b)
1035 ui.status(bookmsgmap[action][0] % b)
1035 else:
1036 else:
1036 ui.warn(bookmsgmap[action][1] % b)
1037 ui.warn(bookmsgmap[action][1] % b)
1037 # discovery can have set the value form invalid entry
1038 # discovery can have set the value form invalid entry
1038 if pushop.bkresult is not None:
1039 if pushop.bkresult is not None:
1039 pushop.bkresult = 1
1040 pushop.bkresult = 1
1040
1041
1041 class pulloperation(object):
1042 class pulloperation(object):
1042 """A object that represent a single pull operation
1043 """A object that represent a single pull operation
1043
1044
1044 It purpose is to carry pull related state and very common operation.
1045 It purpose is to carry pull related state and very common operation.
1045
1046
1046 A new should be created at the beginning of each pull and discarded
1047 A new should be created at the beginning of each pull and discarded
1047 afterward.
1048 afterward.
1048 """
1049 """
1049
1050
1050 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1051 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1051 remotebookmarks=None, streamclonerequested=None):
1052 remotebookmarks=None, streamclonerequested=None):
1052 # repo we pull into
1053 # repo we pull into
1053 self.repo = repo
1054 self.repo = repo
1054 # repo we pull from
1055 # repo we pull from
1055 self.remote = remote
1056 self.remote = remote
1056 # revision we try to pull (None is "all")
1057 # revision we try to pull (None is "all")
1057 self.heads = heads
1058 self.heads = heads
1058 # bookmark pulled explicitly
1059 # bookmark pulled explicitly
1059 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1060 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1060 for bookmark in bookmarks]
1061 for bookmark in bookmarks]
1061 # do we force pull?
1062 # do we force pull?
1062 self.force = force
1063 self.force = force
1063 # whether a streaming clone was requested
1064 # whether a streaming clone was requested
1064 self.streamclonerequested = streamclonerequested
1065 self.streamclonerequested = streamclonerequested
1065 # transaction manager
1066 # transaction manager
1066 self.trmanager = None
1067 self.trmanager = None
1067 # set of common changeset between local and remote before pull
1068 # set of common changeset between local and remote before pull
1068 self.common = None
1069 self.common = None
1069 # set of pulled head
1070 # set of pulled head
1070 self.rheads = None
1071 self.rheads = None
1071 # list of missing changeset to fetch remotely
1072 # list of missing changeset to fetch remotely
1072 self.fetch = None
1073 self.fetch = None
1073 # remote bookmarks data
1074 # remote bookmarks data
1074 self.remotebookmarks = remotebookmarks
1075 self.remotebookmarks = remotebookmarks
1075 # result of changegroup pulling (used as return code by pull)
1076 # result of changegroup pulling (used as return code by pull)
1076 self.cgresult = None
1077 self.cgresult = None
1077 # list of step already done
1078 # list of step already done
1078 self.stepsdone = set()
1079 self.stepsdone = set()
1079 # Whether we attempted a clone from pre-generated bundles.
1080 # Whether we attempted a clone from pre-generated bundles.
1080 self.clonebundleattempted = False
1081 self.clonebundleattempted = False
1081
1082
1082 @util.propertycache
1083 @util.propertycache
1083 def pulledsubset(self):
1084 def pulledsubset(self):
1084 """heads of the set of changeset target by the pull"""
1085 """heads of the set of changeset target by the pull"""
1085 # compute target subset
1086 # compute target subset
1086 if self.heads is None:
1087 if self.heads is None:
1087 # We pulled every thing possible
1088 # We pulled every thing possible
1088 # sync on everything common
1089 # sync on everything common
1089 c = set(self.common)
1090 c = set(self.common)
1090 ret = list(self.common)
1091 ret = list(self.common)
1091 for n in self.rheads:
1092 for n in self.rheads:
1092 if n not in c:
1093 if n not in c:
1093 ret.append(n)
1094 ret.append(n)
1094 return ret
1095 return ret
1095 else:
1096 else:
1096 # We pulled a specific subset
1097 # We pulled a specific subset
1097 # sync on this subset
1098 # sync on this subset
1098 return self.heads
1099 return self.heads
1099
1100
1100 @util.propertycache
1101 @util.propertycache
1101 def canusebundle2(self):
1102 def canusebundle2(self):
1102 return _canusebundle2(self)
1103 return _canusebundle2(self)
1103
1104
1104 @util.propertycache
1105 @util.propertycache
1105 def remotebundle2caps(self):
1106 def remotebundle2caps(self):
1106 return bundle2.bundle2caps(self.remote)
1107 return bundle2.bundle2caps(self.remote)
1107
1108
1108 def gettransaction(self):
1109 def gettransaction(self):
1109 # deprecated; talk to trmanager directly
1110 # deprecated; talk to trmanager directly
1110 return self.trmanager.transaction()
1111 return self.trmanager.transaction()
1111
1112
1112 class transactionmanager(object):
1113 class transactionmanager(object):
1113 """An object to manage the life cycle of a transaction
1114 """An object to manage the life cycle of a transaction
1114
1115
1115 It creates the transaction on demand and calls the appropriate hooks when
1116 It creates the transaction on demand and calls the appropriate hooks when
1116 closing the transaction."""
1117 closing the transaction."""
1117 def __init__(self, repo, source, url):
1118 def __init__(self, repo, source, url):
1118 self.repo = repo
1119 self.repo = repo
1119 self.source = source
1120 self.source = source
1120 self.url = url
1121 self.url = url
1121 self._tr = None
1122 self._tr = None
1122
1123
1123 def transaction(self):
1124 def transaction(self):
1124 """Return an open transaction object, constructing if necessary"""
1125 """Return an open transaction object, constructing if necessary"""
1125 if not self._tr:
1126 if not self._tr:
1126 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1127 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1127 self._tr = self.repo.transaction(trname)
1128 self._tr = self.repo.transaction(trname)
1128 self._tr.hookargs['source'] = self.source
1129 self._tr.hookargs['source'] = self.source
1129 self._tr.hookargs['url'] = self.url
1130 self._tr.hookargs['url'] = self.url
1130 return self._tr
1131 return self._tr
1131
1132
1132 def close(self):
1133 def close(self):
1133 """close transaction if created"""
1134 """close transaction if created"""
1134 if self._tr is not None:
1135 if self._tr is not None:
1135 self._tr.close()
1136 self._tr.close()
1136
1137
1137 def release(self):
1138 def release(self):
1138 """release transaction if created"""
1139 """release transaction if created"""
1139 if self._tr is not None:
1140 if self._tr is not None:
1140 self._tr.release()
1141 self._tr.release()
1141
1142
1142 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1143 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1143 streamclonerequested=None):
1144 streamclonerequested=None):
1144 """Fetch repository data from a remote.
1145 """Fetch repository data from a remote.
1145
1146
1146 This is the main function used to retrieve data from a remote repository.
1147 This is the main function used to retrieve data from a remote repository.
1147
1148
1148 ``repo`` is the local repository to clone into.
1149 ``repo`` is the local repository to clone into.
1149 ``remote`` is a peer instance.
1150 ``remote`` is a peer instance.
1150 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1151 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1151 default) means to pull everything from the remote.
1152 default) means to pull everything from the remote.
1152 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1153 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1153 default, all remote bookmarks are pulled.
1154 default, all remote bookmarks are pulled.
1154 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1155 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1155 initialization.
1156 initialization.
1156 ``streamclonerequested`` is a boolean indicating whether a "streaming
1157 ``streamclonerequested`` is a boolean indicating whether a "streaming
1157 clone" is requested. A "streaming clone" is essentially a raw file copy
1158 clone" is requested. A "streaming clone" is essentially a raw file copy
1158 of revlogs from the server. This only works when the local repository is
1159 of revlogs from the server. This only works when the local repository is
1159 empty. The default value of ``None`` means to respect the server
1160 empty. The default value of ``None`` means to respect the server
1160 configuration for preferring stream clones.
1161 configuration for preferring stream clones.
1161
1162
1162 Returns the ``pulloperation`` created for this pull.
1163 Returns the ``pulloperation`` created for this pull.
1163 """
1164 """
1164 if opargs is None:
1165 if opargs is None:
1165 opargs = {}
1166 opargs = {}
1166 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1167 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1167 streamclonerequested=streamclonerequested, **opargs)
1168 streamclonerequested=streamclonerequested, **opargs)
1168 if pullop.remote.local():
1169 if pullop.remote.local():
1169 missing = set(pullop.remote.requirements) - pullop.repo.supported
1170 missing = set(pullop.remote.requirements) - pullop.repo.supported
1170 if missing:
1171 if missing:
1171 msg = _("required features are not"
1172 msg = _("required features are not"
1172 " supported in the destination:"
1173 " supported in the destination:"
1173 " %s") % (', '.join(sorted(missing)))
1174 " %s") % (', '.join(sorted(missing)))
1174 raise error.Abort(msg)
1175 raise error.Abort(msg)
1175
1176
1176 lock = pullop.repo.lock()
1177 lock = pullop.repo.lock()
1177 try:
1178 try:
1178 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1179 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1179 streamclone.maybeperformlegacystreamclone(pullop)
1180 streamclone.maybeperformlegacystreamclone(pullop)
1180 # This should ideally be in _pullbundle2(). However, it needs to run
1181 # This should ideally be in _pullbundle2(). However, it needs to run
1181 # before discovery to avoid extra work.
1182 # before discovery to avoid extra work.
1182 _maybeapplyclonebundle(pullop)
1183 _maybeapplyclonebundle(pullop)
1183 _pulldiscovery(pullop)
1184 _pulldiscovery(pullop)
1184 if pullop.canusebundle2:
1185 if pullop.canusebundle2:
1185 _pullbundle2(pullop)
1186 _pullbundle2(pullop)
1186 _pullchangeset(pullop)
1187 _pullchangeset(pullop)
1187 _pullphase(pullop)
1188 _pullphase(pullop)
1188 _pullbookmarks(pullop)
1189 _pullbookmarks(pullop)
1189 _pullobsolete(pullop)
1190 _pullobsolete(pullop)
1190 pullop.trmanager.close()
1191 pullop.trmanager.close()
1191 finally:
1192 finally:
1192 pullop.trmanager.release()
1193 pullop.trmanager.release()
1193 lock.release()
1194 lock.release()
1194
1195
1195 return pullop
1196 return pullop
1196
1197
1197 # list of steps to perform discovery before pull
1198 # list of steps to perform discovery before pull
1198 pulldiscoveryorder = []
1199 pulldiscoveryorder = []
1199
1200
1200 # Mapping between step name and function
1201 # Mapping between step name and function
1201 #
1202 #
1202 # This exists to help extensions wrap steps if necessary
1203 # This exists to help extensions wrap steps if necessary
1203 pulldiscoverymapping = {}
1204 pulldiscoverymapping = {}
1204
1205
1205 def pulldiscovery(stepname):
1206 def pulldiscovery(stepname):
1206 """decorator for function performing discovery before pull
1207 """decorator for function performing discovery before pull
1207
1208
1208 The function is added to the step -> function mapping and appended to the
1209 The function is added to the step -> function mapping and appended to the
1209 list of steps. Beware that decorated function will be added in order (this
1210 list of steps. Beware that decorated function will be added in order (this
1210 may matter).
1211 may matter).
1211
1212
1212 You can only use this decorator for a new step, if you want to wrap a step
1213 You can only use this decorator for a new step, if you want to wrap a step
1213 from an extension, change the pulldiscovery dictionary directly."""
1214 from an extension, change the pulldiscovery dictionary directly."""
1214 def dec(func):
1215 def dec(func):
1215 assert stepname not in pulldiscoverymapping
1216 assert stepname not in pulldiscoverymapping
1216 pulldiscoverymapping[stepname] = func
1217 pulldiscoverymapping[stepname] = func
1217 pulldiscoveryorder.append(stepname)
1218 pulldiscoveryorder.append(stepname)
1218 return func
1219 return func
1219 return dec
1220 return dec
1220
1221
1221 def _pulldiscovery(pullop):
1222 def _pulldiscovery(pullop):
1222 """Run all discovery steps"""
1223 """Run all discovery steps"""
1223 for stepname in pulldiscoveryorder:
1224 for stepname in pulldiscoveryorder:
1224 step = pulldiscoverymapping[stepname]
1225 step = pulldiscoverymapping[stepname]
1225 step(pullop)
1226 step(pullop)
1226
1227
1227 @pulldiscovery('b1:bookmarks')
1228 @pulldiscovery('b1:bookmarks')
1228 def _pullbookmarkbundle1(pullop):
1229 def _pullbookmarkbundle1(pullop):
1229 """fetch bookmark data in bundle1 case
1230 """fetch bookmark data in bundle1 case
1230
1231
1231 If not using bundle2, we have to fetch bookmarks before changeset
1232 If not using bundle2, we have to fetch bookmarks before changeset
1232 discovery to reduce the chance and impact of race conditions."""
1233 discovery to reduce the chance and impact of race conditions."""
1233 if pullop.remotebookmarks is not None:
1234 if pullop.remotebookmarks is not None:
1234 return
1235 return
1235 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1236 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1236 # all known bundle2 servers now support listkeys, but lets be nice with
1237 # all known bundle2 servers now support listkeys, but lets be nice with
1237 # new implementation.
1238 # new implementation.
1238 return
1239 return
1239 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1240 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1240
1241
1241
1242
1242 @pulldiscovery('changegroup')
1243 @pulldiscovery('changegroup')
1243 def _pulldiscoverychangegroup(pullop):
1244 def _pulldiscoverychangegroup(pullop):
1244 """discovery phase for the pull
1245 """discovery phase for the pull
1245
1246
1246 Current handle changeset discovery only, will change handle all discovery
1247 Current handle changeset discovery only, will change handle all discovery
1247 at some point."""
1248 at some point."""
1248 tmp = discovery.findcommonincoming(pullop.repo,
1249 tmp = discovery.findcommonincoming(pullop.repo,
1249 pullop.remote,
1250 pullop.remote,
1250 heads=pullop.heads,
1251 heads=pullop.heads,
1251 force=pullop.force)
1252 force=pullop.force)
1252 common, fetch, rheads = tmp
1253 common, fetch, rheads = tmp
1253 nm = pullop.repo.unfiltered().changelog.nodemap
1254 nm = pullop.repo.unfiltered().changelog.nodemap
1254 if fetch and rheads:
1255 if fetch and rheads:
1255 # If a remote heads in filtered locally, lets drop it from the unknown
1256 # If a remote heads in filtered locally, lets drop it from the unknown
1256 # remote heads and put in back in common.
1257 # remote heads and put in back in common.
1257 #
1258 #
1258 # This is a hackish solution to catch most of "common but locally
1259 # This is a hackish solution to catch most of "common but locally
1259 # hidden situation". We do not performs discovery on unfiltered
1260 # hidden situation". We do not performs discovery on unfiltered
1260 # repository because it end up doing a pathological amount of round
1261 # repository because it end up doing a pathological amount of round
1261 # trip for w huge amount of changeset we do not care about.
1262 # trip for w huge amount of changeset we do not care about.
1262 #
1263 #
1263 # If a set of such "common but filtered" changeset exist on the server
1264 # If a set of such "common but filtered" changeset exist on the server
1264 # but are not including a remote heads, we'll not be able to detect it,
1265 # but are not including a remote heads, we'll not be able to detect it,
1265 scommon = set(common)
1266 scommon = set(common)
1266 filteredrheads = []
1267 filteredrheads = []
1267 for n in rheads:
1268 for n in rheads:
1268 if n in nm:
1269 if n in nm:
1269 if n not in scommon:
1270 if n not in scommon:
1270 common.append(n)
1271 common.append(n)
1271 else:
1272 else:
1272 filteredrheads.append(n)
1273 filteredrheads.append(n)
1273 if not filteredrheads:
1274 if not filteredrheads:
1274 fetch = []
1275 fetch = []
1275 rheads = filteredrheads
1276 rheads = filteredrheads
1276 pullop.common = common
1277 pullop.common = common
1277 pullop.fetch = fetch
1278 pullop.fetch = fetch
1278 pullop.rheads = rheads
1279 pullop.rheads = rheads
1279
1280
1280 def _pullbundle2(pullop):
1281 def _pullbundle2(pullop):
1281 """pull data using bundle2
1282 """pull data using bundle2
1282
1283
1283 For now, the only supported data are changegroup."""
1284 For now, the only supported data are changegroup."""
1284 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1285 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1285
1286
1286 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1287 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1287
1288
1288 # pulling changegroup
1289 # pulling changegroup
1289 pullop.stepsdone.add('changegroup')
1290 pullop.stepsdone.add('changegroup')
1290
1291
1291 kwargs['common'] = pullop.common
1292 kwargs['common'] = pullop.common
1292 kwargs['heads'] = pullop.heads or pullop.rheads
1293 kwargs['heads'] = pullop.heads or pullop.rheads
1293 kwargs['cg'] = pullop.fetch
1294 kwargs['cg'] = pullop.fetch
1294 if 'listkeys' in pullop.remotebundle2caps:
1295 if 'listkeys' in pullop.remotebundle2caps:
1295 kwargs['listkeys'] = ['phases']
1296 kwargs['listkeys'] = ['phases']
1296 if pullop.remotebookmarks is None:
1297 if pullop.remotebookmarks is None:
1297 # make sure to always includes bookmark data when migrating
1298 # make sure to always includes bookmark data when migrating
1298 # `hg incoming --bundle` to using this function.
1299 # `hg incoming --bundle` to using this function.
1299 kwargs['listkeys'].append('bookmarks')
1300 kwargs['listkeys'].append('bookmarks')
1300
1301
1301 # If this is a full pull / clone and the server supports the clone bundles
1302 # If this is a full pull / clone and the server supports the clone bundles
1302 # feature, tell the server whether we attempted a clone bundle. The
1303 # feature, tell the server whether we attempted a clone bundle. The
1303 # presence of this flag indicates the client supports clone bundles. This
1304 # presence of this flag indicates the client supports clone bundles. This
1304 # will enable the server to treat clients that support clone bundles
1305 # will enable the server to treat clients that support clone bundles
1305 # differently from those that don't.
1306 # differently from those that don't.
1306 if (pullop.remote.capable('clonebundles')
1307 if (pullop.remote.capable('clonebundles')
1307 and pullop.heads is None and list(pullop.common) == [nullid]):
1308 and pullop.heads is None and list(pullop.common) == [nullid]):
1308 kwargs['cbattempted'] = pullop.clonebundleattempted
1309 kwargs['cbattempted'] = pullop.clonebundleattempted
1309
1310
1310 if streaming:
1311 if streaming:
1311 pullop.repo.ui.status(_('streaming all changes\n'))
1312 pullop.repo.ui.status(_('streaming all changes\n'))
1312 elif not pullop.fetch:
1313 elif not pullop.fetch:
1313 pullop.repo.ui.status(_("no changes found\n"))
1314 pullop.repo.ui.status(_("no changes found\n"))
1314 pullop.cgresult = 0
1315 pullop.cgresult = 0
1315 else:
1316 else:
1316 if pullop.heads is None and list(pullop.common) == [nullid]:
1317 if pullop.heads is None and list(pullop.common) == [nullid]:
1317 pullop.repo.ui.status(_("requesting all changes\n"))
1318 pullop.repo.ui.status(_("requesting all changes\n"))
1318 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1319 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1319 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1320 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1320 if obsolete.commonversion(remoteversions) is not None:
1321 if obsolete.commonversion(remoteversions) is not None:
1321 kwargs['obsmarkers'] = True
1322 kwargs['obsmarkers'] = True
1322 pullop.stepsdone.add('obsmarkers')
1323 pullop.stepsdone.add('obsmarkers')
1323 _pullbundle2extraprepare(pullop, kwargs)
1324 _pullbundle2extraprepare(pullop, kwargs)
1324 bundle = pullop.remote.getbundle('pull', **kwargs)
1325 bundle = pullop.remote.getbundle('pull', **kwargs)
1325 try:
1326 try:
1326 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1327 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1327 except error.BundleValueError as exc:
1328 except error.BundleValueError as exc:
1328 raise error.Abort(_('missing support for %s') % exc)
1329 raise error.Abort(_('missing support for %s') % exc)
1329
1330
1330 if pullop.fetch:
1331 if pullop.fetch:
1331 results = [cg['return'] for cg in op.records['changegroup']]
1332 results = [cg['return'] for cg in op.records['changegroup']]
1332 pullop.cgresult = changegroup.combineresults(results)
1333 pullop.cgresult = changegroup.combineresults(results)
1333
1334
1334 # processing phases change
1335 # processing phases change
1335 for namespace, value in op.records['listkeys']:
1336 for namespace, value in op.records['listkeys']:
1336 if namespace == 'phases':
1337 if namespace == 'phases':
1337 _pullapplyphases(pullop, value)
1338 _pullapplyphases(pullop, value)
1338
1339
1339 # processing bookmark update
1340 # processing bookmark update
1340 for namespace, value in op.records['listkeys']:
1341 for namespace, value in op.records['listkeys']:
1341 if namespace == 'bookmarks':
1342 if namespace == 'bookmarks':
1342 pullop.remotebookmarks = value
1343 pullop.remotebookmarks = value
1343
1344
1344 # bookmark data were either already there or pulled in the bundle
1345 # bookmark data were either already there or pulled in the bundle
1345 if pullop.remotebookmarks is not None:
1346 if pullop.remotebookmarks is not None:
1346 _pullbookmarks(pullop)
1347 _pullbookmarks(pullop)
1347
1348
1348 def _pullbundle2extraprepare(pullop, kwargs):
1349 def _pullbundle2extraprepare(pullop, kwargs):
1349 """hook function so that extensions can extend the getbundle call"""
1350 """hook function so that extensions can extend the getbundle call"""
1350 pass
1351 pass
1351
1352
1352 def _pullchangeset(pullop):
1353 def _pullchangeset(pullop):
1353 """pull changeset from unbundle into the local repo"""
1354 """pull changeset from unbundle into the local repo"""
1354 # We delay the open of the transaction as late as possible so we
1355 # We delay the open of the transaction as late as possible so we
1355 # don't open transaction for nothing or you break future useful
1356 # don't open transaction for nothing or you break future useful
1356 # rollback call
1357 # rollback call
1357 if 'changegroup' in pullop.stepsdone:
1358 if 'changegroup' in pullop.stepsdone:
1358 return
1359 return
1359 pullop.stepsdone.add('changegroup')
1360 pullop.stepsdone.add('changegroup')
1360 if not pullop.fetch:
1361 if not pullop.fetch:
1361 pullop.repo.ui.status(_("no changes found\n"))
1362 pullop.repo.ui.status(_("no changes found\n"))
1362 pullop.cgresult = 0
1363 pullop.cgresult = 0
1363 return
1364 return
1364 pullop.gettransaction()
1365 pullop.gettransaction()
1365 if pullop.heads is None and list(pullop.common) == [nullid]:
1366 if pullop.heads is None and list(pullop.common) == [nullid]:
1366 pullop.repo.ui.status(_("requesting all changes\n"))
1367 pullop.repo.ui.status(_("requesting all changes\n"))
1367 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1368 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1368 # issue1320, avoid a race if remote changed after discovery
1369 # issue1320, avoid a race if remote changed after discovery
1369 pullop.heads = pullop.rheads
1370 pullop.heads = pullop.rheads
1370
1371
1371 if pullop.remote.capable('getbundle'):
1372 if pullop.remote.capable('getbundle'):
1372 # TODO: get bundlecaps from remote
1373 # TODO: get bundlecaps from remote
1373 cg = pullop.remote.getbundle('pull', common=pullop.common,
1374 cg = pullop.remote.getbundle('pull', common=pullop.common,
1374 heads=pullop.heads or pullop.rheads)
1375 heads=pullop.heads or pullop.rheads)
1375 elif pullop.heads is None:
1376 elif pullop.heads is None:
1376 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1377 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1377 elif not pullop.remote.capable('changegroupsubset'):
1378 elif not pullop.remote.capable('changegroupsubset'):
1378 raise error.Abort(_("partial pull cannot be done because "
1379 raise error.Abort(_("partial pull cannot be done because "
1379 "other repository doesn't support "
1380 "other repository doesn't support "
1380 "changegroupsubset."))
1381 "changegroupsubset."))
1381 else:
1382 else:
1382 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1383 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1383 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1384 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1384
1385
1385 def _pullphase(pullop):
1386 def _pullphase(pullop):
1386 # Get remote phases data from remote
1387 # Get remote phases data from remote
1387 if 'phases' in pullop.stepsdone:
1388 if 'phases' in pullop.stepsdone:
1388 return
1389 return
1389 remotephases = pullop.remote.listkeys('phases')
1390 remotephases = pullop.remote.listkeys('phases')
1390 _pullapplyphases(pullop, remotephases)
1391 _pullapplyphases(pullop, remotephases)
1391
1392
1392 def _pullapplyphases(pullop, remotephases):
1393 def _pullapplyphases(pullop, remotephases):
1393 """apply phase movement from observed remote state"""
1394 """apply phase movement from observed remote state"""
1394 if 'phases' in pullop.stepsdone:
1395 if 'phases' in pullop.stepsdone:
1395 return
1396 return
1396 pullop.stepsdone.add('phases')
1397 pullop.stepsdone.add('phases')
1397 publishing = bool(remotephases.get('publishing', False))
1398 publishing = bool(remotephases.get('publishing', False))
1398 if remotephases and not publishing:
1399 if remotephases and not publishing:
1399 # remote is new and unpublishing
1400 # remote is new and unpublishing
1400 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1401 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1401 pullop.pulledsubset,
1402 pullop.pulledsubset,
1402 remotephases)
1403 remotephases)
1403 dheads = pullop.pulledsubset
1404 dheads = pullop.pulledsubset
1404 else:
1405 else:
1405 # Remote is old or publishing all common changesets
1406 # Remote is old or publishing all common changesets
1406 # should be seen as public
1407 # should be seen as public
1407 pheads = pullop.pulledsubset
1408 pheads = pullop.pulledsubset
1408 dheads = []
1409 dheads = []
1409 unfi = pullop.repo.unfiltered()
1410 unfi = pullop.repo.unfiltered()
1410 phase = unfi._phasecache.phase
1411 phase = unfi._phasecache.phase
1411 rev = unfi.changelog.nodemap.get
1412 rev = unfi.changelog.nodemap.get
1412 public = phases.public
1413 public = phases.public
1413 draft = phases.draft
1414 draft = phases.draft
1414
1415
1415 # exclude changesets already public locally and update the others
1416 # exclude changesets already public locally and update the others
1416 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1417 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1417 if pheads:
1418 if pheads:
1418 tr = pullop.gettransaction()
1419 tr = pullop.gettransaction()
1419 phases.advanceboundary(pullop.repo, tr, public, pheads)
1420 phases.advanceboundary(pullop.repo, tr, public, pheads)
1420
1421
1421 # exclude changesets already draft locally and update the others
1422 # exclude changesets already draft locally and update the others
1422 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1423 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1423 if dheads:
1424 if dheads:
1424 tr = pullop.gettransaction()
1425 tr = pullop.gettransaction()
1425 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1426 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1426
1427
1427 def _pullbookmarks(pullop):
1428 def _pullbookmarks(pullop):
1428 """process the remote bookmark information to update the local one"""
1429 """process the remote bookmark information to update the local one"""
1429 if 'bookmarks' in pullop.stepsdone:
1430 if 'bookmarks' in pullop.stepsdone:
1430 return
1431 return
1431 pullop.stepsdone.add('bookmarks')
1432 pullop.stepsdone.add('bookmarks')
1432 repo = pullop.repo
1433 repo = pullop.repo
1433 remotebookmarks = pullop.remotebookmarks
1434 remotebookmarks = pullop.remotebookmarks
1434 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1435 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1435 pullop.remote.url(),
1436 pullop.remote.url(),
1436 pullop.gettransaction,
1437 pullop.gettransaction,
1437 explicit=pullop.explicitbookmarks)
1438 explicit=pullop.explicitbookmarks)
1438
1439
1439 def _pullobsolete(pullop):
1440 def _pullobsolete(pullop):
1440 """utility function to pull obsolete markers from a remote
1441 """utility function to pull obsolete markers from a remote
1441
1442
1442 The `gettransaction` is function that return the pull transaction, creating
1443 The `gettransaction` is function that return the pull transaction, creating
1443 one if necessary. We return the transaction to inform the calling code that
1444 one if necessary. We return the transaction to inform the calling code that
1444 a new transaction have been created (when applicable).
1445 a new transaction have been created (when applicable).
1445
1446
1446 Exists mostly to allow overriding for experimentation purpose"""
1447 Exists mostly to allow overriding for experimentation purpose"""
1447 if 'obsmarkers' in pullop.stepsdone:
1448 if 'obsmarkers' in pullop.stepsdone:
1448 return
1449 return
1449 pullop.stepsdone.add('obsmarkers')
1450 pullop.stepsdone.add('obsmarkers')
1450 tr = None
1451 tr = None
1451 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1452 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1452 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1453 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1453 remoteobs = pullop.remote.listkeys('obsolete')
1454 remoteobs = pullop.remote.listkeys('obsolete')
1454 if 'dump0' in remoteobs:
1455 if 'dump0' in remoteobs:
1455 tr = pullop.gettransaction()
1456 tr = pullop.gettransaction()
1456 markers = []
1457 markers = []
1457 for key in sorted(remoteobs, reverse=True):
1458 for key in sorted(remoteobs, reverse=True):
1458 if key.startswith('dump'):
1459 if key.startswith('dump'):
1459 data = base85.b85decode(remoteobs[key])
1460 data = base85.b85decode(remoteobs[key])
1460 version, newmarks = obsolete._readmarkers(data)
1461 version, newmarks = obsolete._readmarkers(data)
1461 markers += newmarks
1462 markers += newmarks
1462 if markers:
1463 if markers:
1463 pullop.repo.obsstore.add(tr, markers)
1464 pullop.repo.obsstore.add(tr, markers)
1464 pullop.repo.invalidatevolatilesets()
1465 pullop.repo.invalidatevolatilesets()
1465 return tr
1466 return tr
1466
1467
1467 def caps20to10(repo):
1468 def caps20to10(repo):
1468 """return a set with appropriate options to use bundle20 during getbundle"""
1469 """return a set with appropriate options to use bundle20 during getbundle"""
1469 caps = set(['HG20'])
1470 caps = set(['HG20'])
1470 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1471 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1471 caps.add('bundle2=' + urlreq.quote(capsblob))
1472 caps.add('bundle2=' + urlreq.quote(capsblob))
1472 return caps
1473 return caps
1473
1474
1474 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1475 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1475 getbundle2partsorder = []
1476 getbundle2partsorder = []
1476
1477
1477 # Mapping between step name and function
1478 # Mapping between step name and function
1478 #
1479 #
1479 # This exists to help extensions wrap steps if necessary
1480 # This exists to help extensions wrap steps if necessary
1480 getbundle2partsmapping = {}
1481 getbundle2partsmapping = {}
1481
1482
1482 def getbundle2partsgenerator(stepname, idx=None):
1483 def getbundle2partsgenerator(stepname, idx=None):
1483 """decorator for function generating bundle2 part for getbundle
1484 """decorator for function generating bundle2 part for getbundle
1484
1485
1485 The function is added to the step -> function mapping and appended to the
1486 The function is added to the step -> function mapping and appended to the
1486 list of steps. Beware that decorated functions will be added in order
1487 list of steps. Beware that decorated functions will be added in order
1487 (this may matter).
1488 (this may matter).
1488
1489
1489 You can only use this decorator for new steps, if you want to wrap a step
1490 You can only use this decorator for new steps, if you want to wrap a step
1490 from an extension, attack the getbundle2partsmapping dictionary directly."""
1491 from an extension, attack the getbundle2partsmapping dictionary directly."""
1491 def dec(func):
1492 def dec(func):
1492 assert stepname not in getbundle2partsmapping
1493 assert stepname not in getbundle2partsmapping
1493 getbundle2partsmapping[stepname] = func
1494 getbundle2partsmapping[stepname] = func
1494 if idx is None:
1495 if idx is None:
1495 getbundle2partsorder.append(stepname)
1496 getbundle2partsorder.append(stepname)
1496 else:
1497 else:
1497 getbundle2partsorder.insert(idx, stepname)
1498 getbundle2partsorder.insert(idx, stepname)
1498 return func
1499 return func
1499 return dec
1500 return dec
1500
1501
1501 def bundle2requested(bundlecaps):
1502 def bundle2requested(bundlecaps):
1502 if bundlecaps is not None:
1503 if bundlecaps is not None:
1503 return any(cap.startswith('HG2') for cap in bundlecaps)
1504 return any(cap.startswith('HG2') for cap in bundlecaps)
1504 return False
1505 return False
1505
1506
1506 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1507 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1507 **kwargs):
1508 **kwargs):
1508 """return a full bundle (with potentially multiple kind of parts)
1509 """return a full bundle (with potentially multiple kind of parts)
1509
1510
1510 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1511 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1511 passed. For now, the bundle can contain only changegroup, but this will
1512 passed. For now, the bundle can contain only changegroup, but this will
1512 changes when more part type will be available for bundle2.
1513 changes when more part type will be available for bundle2.
1513
1514
1514 This is different from changegroup.getchangegroup that only returns an HG10
1515 This is different from changegroup.getchangegroup that only returns an HG10
1515 changegroup bundle. They may eventually get reunited in the future when we
1516 changegroup bundle. They may eventually get reunited in the future when we
1516 have a clearer idea of the API we what to query different data.
1517 have a clearer idea of the API we what to query different data.
1517
1518
1518 The implementation is at a very early stage and will get massive rework
1519 The implementation is at a very early stage and will get massive rework
1519 when the API of bundle is refined.
1520 when the API of bundle is refined.
1520 """
1521 """
1521 usebundle2 = bundle2requested(bundlecaps)
1522 usebundle2 = bundle2requested(bundlecaps)
1522 # bundle10 case
1523 # bundle10 case
1523 if not usebundle2:
1524 if not usebundle2:
1524 if bundlecaps and not kwargs.get('cg', True):
1525 if bundlecaps and not kwargs.get('cg', True):
1525 raise ValueError(_('request for bundle10 must include changegroup'))
1526 raise ValueError(_('request for bundle10 must include changegroup'))
1526
1527
1527 if kwargs:
1528 if kwargs:
1528 raise ValueError(_('unsupported getbundle arguments: %s')
1529 raise ValueError(_('unsupported getbundle arguments: %s')
1529 % ', '.join(sorted(kwargs.keys())))
1530 % ', '.join(sorted(kwargs.keys())))
1530 return changegroup.getchangegroup(repo, source, heads=heads,
1531 return changegroup.getchangegroup(repo, source, heads=heads,
1531 common=common, bundlecaps=bundlecaps)
1532 common=common, bundlecaps=bundlecaps)
1532
1533
1533 # bundle20 case
1534 # bundle20 case
1534 b2caps = {}
1535 b2caps = {}
1535 for bcaps in bundlecaps:
1536 for bcaps in bundlecaps:
1536 if bcaps.startswith('bundle2='):
1537 if bcaps.startswith('bundle2='):
1537 blob = urlreq.unquote(bcaps[len('bundle2='):])
1538 blob = urlreq.unquote(bcaps[len('bundle2='):])
1538 b2caps.update(bundle2.decodecaps(blob))
1539 b2caps.update(bundle2.decodecaps(blob))
1539 bundler = bundle2.bundle20(repo.ui, b2caps)
1540 bundler = bundle2.bundle20(repo.ui, b2caps)
1540
1541
1541 kwargs['heads'] = heads
1542 kwargs['heads'] = heads
1542 kwargs['common'] = common
1543 kwargs['common'] = common
1543
1544
1544 for name in getbundle2partsorder:
1545 for name in getbundle2partsorder:
1545 func = getbundle2partsmapping[name]
1546 func = getbundle2partsmapping[name]
1546 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1547 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1547 **kwargs)
1548 **kwargs)
1548
1549
1549 return util.chunkbuffer(bundler.getchunks())
1550 return util.chunkbuffer(bundler.getchunks())
1550
1551
1551 @getbundle2partsgenerator('changegroup')
1552 @getbundle2partsgenerator('changegroup')
1552 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1553 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1553 b2caps=None, heads=None, common=None, **kwargs):
1554 b2caps=None, heads=None, common=None, **kwargs):
1554 """add a changegroup part to the requested bundle"""
1555 """add a changegroup part to the requested bundle"""
1555 cg = None
1556 cg = None
1556 if kwargs.get('cg', True):
1557 if kwargs.get('cg', True):
1557 # build changegroup bundle here.
1558 # build changegroup bundle here.
1558 version = '01'
1559 version = '01'
1559 cgversions = b2caps.get('changegroup')
1560 cgversions = b2caps.get('changegroup')
1560 if cgversions: # 3.1 and 3.2 ship with an empty value
1561 if cgversions: # 3.1 and 3.2 ship with an empty value
1561 cgversions = [v for v in cgversions
1562 cgversions = [v for v in cgversions
1562 if v in changegroup.supportedoutgoingversions(repo)]
1563 if v in changegroup.supportedoutgoingversions(repo)]
1563 if not cgversions:
1564 if not cgversions:
1564 raise ValueError(_('no common changegroup version'))
1565 raise ValueError(_('no common changegroup version'))
1565 version = max(cgversions)
1566 version = max(cgversions)
1566 outgoing = changegroup.computeoutgoing(repo, heads, common)
1567 outgoing = changegroup.computeoutgoing(repo, heads, common)
1567 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1568 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1568 bundlecaps=bundlecaps,
1569 bundlecaps=bundlecaps,
1569 version=version)
1570 version=version)
1570
1571
1571 if cg:
1572 if cg:
1572 part = bundler.newpart('changegroup', data=cg)
1573 part = bundler.newpart('changegroup', data=cg)
1573 if cgversions:
1574 if cgversions:
1574 part.addparam('version', version)
1575 part.addparam('version', version)
1575 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1576 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1576 if 'treemanifest' in repo.requirements:
1577 if 'treemanifest' in repo.requirements:
1577 part.addparam('treemanifest', '1')
1578 part.addparam('treemanifest', '1')
1578
1579
1579 @getbundle2partsgenerator('listkeys')
1580 @getbundle2partsgenerator('listkeys')
1580 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1581 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1581 b2caps=None, **kwargs):
1582 b2caps=None, **kwargs):
1582 """add parts containing listkeys namespaces to the requested bundle"""
1583 """add parts containing listkeys namespaces to the requested bundle"""
1583 listkeys = kwargs.get('listkeys', ())
1584 listkeys = kwargs.get('listkeys', ())
1584 for namespace in listkeys:
1585 for namespace in listkeys:
1585 part = bundler.newpart('listkeys')
1586 part = bundler.newpart('listkeys')
1586 part.addparam('namespace', namespace)
1587 part.addparam('namespace', namespace)
1587 keys = repo.listkeys(namespace).items()
1588 keys = repo.listkeys(namespace).items()
1588 part.data = pushkey.encodekeys(keys)
1589 part.data = pushkey.encodekeys(keys)
1589
1590
1590 @getbundle2partsgenerator('obsmarkers')
1591 @getbundle2partsgenerator('obsmarkers')
1591 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1592 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1592 b2caps=None, heads=None, **kwargs):
1593 b2caps=None, heads=None, **kwargs):
1593 """add an obsolescence markers part to the requested bundle"""
1594 """add an obsolescence markers part to the requested bundle"""
1594 if kwargs.get('obsmarkers', False):
1595 if kwargs.get('obsmarkers', False):
1595 if heads is None:
1596 if heads is None:
1596 heads = repo.heads()
1597 heads = repo.heads()
1597 subset = [c.node() for c in repo.set('::%ln', heads)]
1598 subset = [c.node() for c in repo.set('::%ln', heads)]
1598 markers = repo.obsstore.relevantmarkers(subset)
1599 markers = repo.obsstore.relevantmarkers(subset)
1599 markers = sorted(markers)
1600 markers = sorted(markers)
1600 buildobsmarkerspart(bundler, markers)
1601 buildobsmarkerspart(bundler, markers)
1601
1602
1602 @getbundle2partsgenerator('hgtagsfnodes')
1603 @getbundle2partsgenerator('hgtagsfnodes')
1603 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1604 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1604 b2caps=None, heads=None, common=None,
1605 b2caps=None, heads=None, common=None,
1605 **kwargs):
1606 **kwargs):
1606 """Transfer the .hgtags filenodes mapping.
1607 """Transfer the .hgtags filenodes mapping.
1607
1608
1608 Only values for heads in this bundle will be transferred.
1609 Only values for heads in this bundle will be transferred.
1609
1610
1610 The part data consists of pairs of 20 byte changeset node and .hgtags
1611 The part data consists of pairs of 20 byte changeset node and .hgtags
1611 filenodes raw values.
1612 filenodes raw values.
1612 """
1613 """
1613 # Don't send unless:
1614 # Don't send unless:
1614 # - changeset are being exchanged,
1615 # - changeset are being exchanged,
1615 # - the client supports it.
1616 # - the client supports it.
1616 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1617 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1617 return
1618 return
1618
1619
1619 outgoing = changegroup.computeoutgoing(repo, heads, common)
1620 outgoing = changegroup.computeoutgoing(repo, heads, common)
1620
1621
1621 if not outgoing.missingheads:
1622 if not outgoing.missingheads:
1622 return
1623 return
1623
1624
1624 cache = tags.hgtagsfnodescache(repo.unfiltered())
1625 cache = tags.hgtagsfnodescache(repo.unfiltered())
1625 chunks = []
1626 chunks = []
1626
1627
1627 # .hgtags fnodes are only relevant for head changesets. While we could
1628 # .hgtags fnodes are only relevant for head changesets. While we could
1628 # transfer values for all known nodes, there will likely be little to
1629 # transfer values for all known nodes, there will likely be little to
1629 # no benefit.
1630 # no benefit.
1630 #
1631 #
1631 # We don't bother using a generator to produce output data because
1632 # We don't bother using a generator to produce output data because
1632 # a) we only have 40 bytes per head and even esoteric numbers of heads
1633 # a) we only have 40 bytes per head and even esoteric numbers of heads
1633 # consume little memory (1M heads is 40MB) b) we don't want to send the
1634 # consume little memory (1M heads is 40MB) b) we don't want to send the
1634 # part if we don't have entries and knowing if we have entries requires
1635 # part if we don't have entries and knowing if we have entries requires
1635 # cache lookups.
1636 # cache lookups.
1636 for node in outgoing.missingheads:
1637 for node in outgoing.missingheads:
1637 # Don't compute missing, as this may slow down serving.
1638 # Don't compute missing, as this may slow down serving.
1638 fnode = cache.getfnode(node, computemissing=False)
1639 fnode = cache.getfnode(node, computemissing=False)
1639 if fnode is not None:
1640 if fnode is not None:
1640 chunks.extend([node, fnode])
1641 chunks.extend([node, fnode])
1641
1642
1642 if chunks:
1643 if chunks:
1643 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1644 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1644
1645
1645 def check_heads(repo, their_heads, context):
1646 def check_heads(repo, their_heads, context):
1646 """check if the heads of a repo have been modified
1647 """check if the heads of a repo have been modified
1647
1648
1648 Used by peer for unbundling.
1649 Used by peer for unbundling.
1649 """
1650 """
1650 heads = repo.heads()
1651 heads = repo.heads()
1651 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1652 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1652 if not (their_heads == ['force'] or their_heads == heads or
1653 if not (their_heads == ['force'] or their_heads == heads or
1653 their_heads == ['hashed', heads_hash]):
1654 their_heads == ['hashed', heads_hash]):
1654 # someone else committed/pushed/unbundled while we
1655 # someone else committed/pushed/unbundled while we
1655 # were transferring data
1656 # were transferring data
1656 raise error.PushRaced('repository changed while %s - '
1657 raise error.PushRaced('repository changed while %s - '
1657 'please try again' % context)
1658 'please try again' % context)
1658
1659
1659 def unbundle(repo, cg, heads, source, url):
1660 def unbundle(repo, cg, heads, source, url):
1660 """Apply a bundle to a repo.
1661 """Apply a bundle to a repo.
1661
1662
1662 this function makes sure the repo is locked during the application and have
1663 this function makes sure the repo is locked during the application and have
1663 mechanism to check that no push race occurred between the creation of the
1664 mechanism to check that no push race occurred between the creation of the
1664 bundle and its application.
1665 bundle and its application.
1665
1666
1666 If the push was raced as PushRaced exception is raised."""
1667 If the push was raced as PushRaced exception is raised."""
1667 r = 0
1668 r = 0
1668 # need a transaction when processing a bundle2 stream
1669 # need a transaction when processing a bundle2 stream
1669 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1670 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1670 lockandtr = [None, None, None]
1671 lockandtr = [None, None, None]
1671 recordout = None
1672 recordout = None
1672 # quick fix for output mismatch with bundle2 in 3.4
1673 # quick fix for output mismatch with bundle2 in 3.4
1673 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1674 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1674 False)
1675 False)
1675 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1676 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1676 captureoutput = True
1677 captureoutput = True
1677 try:
1678 try:
1678 check_heads(repo, heads, 'uploading changes')
1679 check_heads(repo, heads, 'uploading changes')
1679 # push can proceed
1680 # push can proceed
1680 if util.safehasattr(cg, 'params'):
1681 if util.safehasattr(cg, 'params'):
1681 r = None
1682 r = None
1682 try:
1683 try:
1683 def gettransaction():
1684 def gettransaction():
1684 if not lockandtr[2]:
1685 if not lockandtr[2]:
1685 lockandtr[0] = repo.wlock()
1686 lockandtr[0] = repo.wlock()
1686 lockandtr[1] = repo.lock()
1687 lockandtr[1] = repo.lock()
1687 lockandtr[2] = repo.transaction(source)
1688 lockandtr[2] = repo.transaction(source)
1688 lockandtr[2].hookargs['source'] = source
1689 lockandtr[2].hookargs['source'] = source
1689 lockandtr[2].hookargs['url'] = url
1690 lockandtr[2].hookargs['url'] = url
1690 lockandtr[2].hookargs['bundle2'] = '1'
1691 lockandtr[2].hookargs['bundle2'] = '1'
1691 return lockandtr[2]
1692 return lockandtr[2]
1692
1693
1693 # Do greedy locking by default until we're satisfied with lazy
1694 # Do greedy locking by default until we're satisfied with lazy
1694 # locking.
1695 # locking.
1695 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1696 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1696 gettransaction()
1697 gettransaction()
1697
1698
1698 op = bundle2.bundleoperation(repo, gettransaction,
1699 op = bundle2.bundleoperation(repo, gettransaction,
1699 captureoutput=captureoutput)
1700 captureoutput=captureoutput)
1700 try:
1701 try:
1701 op = bundle2.processbundle(repo, cg, op=op)
1702 op = bundle2.processbundle(repo, cg, op=op)
1702 finally:
1703 finally:
1703 r = op.reply
1704 r = op.reply
1704 if captureoutput and r is not None:
1705 if captureoutput and r is not None:
1705 repo.ui.pushbuffer(error=True, subproc=True)
1706 repo.ui.pushbuffer(error=True, subproc=True)
1706 def recordout(output):
1707 def recordout(output):
1707 r.newpart('output', data=output, mandatory=False)
1708 r.newpart('output', data=output, mandatory=False)
1708 if lockandtr[2] is not None:
1709 if lockandtr[2] is not None:
1709 lockandtr[2].close()
1710 lockandtr[2].close()
1710 except BaseException as exc:
1711 except BaseException as exc:
1711 exc.duringunbundle2 = True
1712 exc.duringunbundle2 = True
1712 if captureoutput and r is not None:
1713 if captureoutput and r is not None:
1713 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1714 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1714 def recordout(output):
1715 def recordout(output):
1715 part = bundle2.bundlepart('output', data=output,
1716 part = bundle2.bundlepart('output', data=output,
1716 mandatory=False)
1717 mandatory=False)
1717 parts.append(part)
1718 parts.append(part)
1718 raise
1719 raise
1719 else:
1720 else:
1720 lockandtr[1] = repo.lock()
1721 lockandtr[1] = repo.lock()
1721 r = cg.apply(repo, source, url)
1722 r = cg.apply(repo, source, url)
1722 finally:
1723 finally:
1723 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1724 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1724 if recordout is not None:
1725 if recordout is not None:
1725 recordout(repo.ui.popbuffer())
1726 recordout(repo.ui.popbuffer())
1726 return r
1727 return r
1727
1728
1728 def _maybeapplyclonebundle(pullop):
1729 def _maybeapplyclonebundle(pullop):
1729 """Apply a clone bundle from a remote, if possible."""
1730 """Apply a clone bundle from a remote, if possible."""
1730
1731
1731 repo = pullop.repo
1732 repo = pullop.repo
1732 remote = pullop.remote
1733 remote = pullop.remote
1733
1734
1734 if not repo.ui.configbool('ui', 'clonebundles', True):
1735 if not repo.ui.configbool('ui', 'clonebundles', True):
1735 return
1736 return
1736
1737
1737 # Only run if local repo is empty.
1738 # Only run if local repo is empty.
1738 if len(repo):
1739 if len(repo):
1739 return
1740 return
1740
1741
1741 if pullop.heads:
1742 if pullop.heads:
1742 return
1743 return
1743
1744
1744 if not remote.capable('clonebundles'):
1745 if not remote.capable('clonebundles'):
1745 return
1746 return
1746
1747
1747 res = remote._call('clonebundles')
1748 res = remote._call('clonebundles')
1748
1749
1749 # If we call the wire protocol command, that's good enough to record the
1750 # If we call the wire protocol command, that's good enough to record the
1750 # attempt.
1751 # attempt.
1751 pullop.clonebundleattempted = True
1752 pullop.clonebundleattempted = True
1752
1753
1753 entries = parseclonebundlesmanifest(repo, res)
1754 entries = parseclonebundlesmanifest(repo, res)
1754 if not entries:
1755 if not entries:
1755 repo.ui.note(_('no clone bundles available on remote; '
1756 repo.ui.note(_('no clone bundles available on remote; '
1756 'falling back to regular clone\n'))
1757 'falling back to regular clone\n'))
1757 return
1758 return
1758
1759
1759 entries = filterclonebundleentries(repo, entries)
1760 entries = filterclonebundleentries(repo, entries)
1760 if not entries:
1761 if not entries:
1761 # There is a thundering herd concern here. However, if a server
1762 # There is a thundering herd concern here. However, if a server
1762 # operator doesn't advertise bundles appropriate for its clients,
1763 # operator doesn't advertise bundles appropriate for its clients,
1763 # they deserve what's coming. Furthermore, from a client's
1764 # they deserve what's coming. Furthermore, from a client's
1764 # perspective, no automatic fallback would mean not being able to
1765 # perspective, no automatic fallback would mean not being able to
1765 # clone!
1766 # clone!
1766 repo.ui.warn(_('no compatible clone bundles available on server; '
1767 repo.ui.warn(_('no compatible clone bundles available on server; '
1767 'falling back to regular clone\n'))
1768 'falling back to regular clone\n'))
1768 repo.ui.warn(_('(you may want to report this to the server '
1769 repo.ui.warn(_('(you may want to report this to the server '
1769 'operator)\n'))
1770 'operator)\n'))
1770 return
1771 return
1771
1772
1772 entries = sortclonebundleentries(repo.ui, entries)
1773 entries = sortclonebundleentries(repo.ui, entries)
1773
1774
1774 url = entries[0]['URL']
1775 url = entries[0]['URL']
1775 repo.ui.status(_('applying clone bundle from %s\n') % url)
1776 repo.ui.status(_('applying clone bundle from %s\n') % url)
1776 if trypullbundlefromurl(repo.ui, repo, url):
1777 if trypullbundlefromurl(repo.ui, repo, url):
1777 repo.ui.status(_('finished applying clone bundle\n'))
1778 repo.ui.status(_('finished applying clone bundle\n'))
1778 # Bundle failed.
1779 # Bundle failed.
1779 #
1780 #
1780 # We abort by default to avoid the thundering herd of
1781 # We abort by default to avoid the thundering herd of
1781 # clients flooding a server that was expecting expensive
1782 # clients flooding a server that was expecting expensive
1782 # clone load to be offloaded.
1783 # clone load to be offloaded.
1783 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1784 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1784 repo.ui.warn(_('falling back to normal clone\n'))
1785 repo.ui.warn(_('falling back to normal clone\n'))
1785 else:
1786 else:
1786 raise error.Abort(_('error applying bundle'),
1787 raise error.Abort(_('error applying bundle'),
1787 hint=_('if this error persists, consider contacting '
1788 hint=_('if this error persists, consider contacting '
1788 'the server operator or disable clone '
1789 'the server operator or disable clone '
1789 'bundles via '
1790 'bundles via '
1790 '"--config ui.clonebundles=false"'))
1791 '"--config ui.clonebundles=false"'))
1791
1792
1792 def parseclonebundlesmanifest(repo, s):
1793 def parseclonebundlesmanifest(repo, s):
1793 """Parses the raw text of a clone bundles manifest.
1794 """Parses the raw text of a clone bundles manifest.
1794
1795
1795 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1796 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1796 to the URL and other keys are the attributes for the entry.
1797 to the URL and other keys are the attributes for the entry.
1797 """
1798 """
1798 m = []
1799 m = []
1799 for line in s.splitlines():
1800 for line in s.splitlines():
1800 fields = line.split()
1801 fields = line.split()
1801 if not fields:
1802 if not fields:
1802 continue
1803 continue
1803 attrs = {'URL': fields[0]}
1804 attrs = {'URL': fields[0]}
1804 for rawattr in fields[1:]:
1805 for rawattr in fields[1:]:
1805 key, value = rawattr.split('=', 1)
1806 key, value = rawattr.split('=', 1)
1806 key = urlreq.unquote(key)
1807 key = urlreq.unquote(key)
1807 value = urlreq.unquote(value)
1808 value = urlreq.unquote(value)
1808 attrs[key] = value
1809 attrs[key] = value
1809
1810
1810 # Parse BUNDLESPEC into components. This makes client-side
1811 # Parse BUNDLESPEC into components. This makes client-side
1811 # preferences easier to specify since you can prefer a single
1812 # preferences easier to specify since you can prefer a single
1812 # component of the BUNDLESPEC.
1813 # component of the BUNDLESPEC.
1813 if key == 'BUNDLESPEC':
1814 if key == 'BUNDLESPEC':
1814 try:
1815 try:
1815 comp, version, params = parsebundlespec(repo, value,
1816 comp, version, params = parsebundlespec(repo, value,
1816 externalnames=True)
1817 externalnames=True)
1817 attrs['COMPRESSION'] = comp
1818 attrs['COMPRESSION'] = comp
1818 attrs['VERSION'] = version
1819 attrs['VERSION'] = version
1819 except error.InvalidBundleSpecification:
1820 except error.InvalidBundleSpecification:
1820 pass
1821 pass
1821 except error.UnsupportedBundleSpecification:
1822 except error.UnsupportedBundleSpecification:
1822 pass
1823 pass
1823
1824
1824 m.append(attrs)
1825 m.append(attrs)
1825
1826
1826 return m
1827 return m
1827
1828
1828 def filterclonebundleentries(repo, entries):
1829 def filterclonebundleentries(repo, entries):
1829 """Remove incompatible clone bundle manifest entries.
1830 """Remove incompatible clone bundle manifest entries.
1830
1831
1831 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1832 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1832 and returns a new list consisting of only the entries that this client
1833 and returns a new list consisting of only the entries that this client
1833 should be able to apply.
1834 should be able to apply.
1834
1835
1835 There is no guarantee we'll be able to apply all returned entries because
1836 There is no guarantee we'll be able to apply all returned entries because
1836 the metadata we use to filter on may be missing or wrong.
1837 the metadata we use to filter on may be missing or wrong.
1837 """
1838 """
1838 newentries = []
1839 newentries = []
1839 for entry in entries:
1840 for entry in entries:
1840 spec = entry.get('BUNDLESPEC')
1841 spec = entry.get('BUNDLESPEC')
1841 if spec:
1842 if spec:
1842 try:
1843 try:
1843 parsebundlespec(repo, spec, strict=True)
1844 parsebundlespec(repo, spec, strict=True)
1844 except error.InvalidBundleSpecification as e:
1845 except error.InvalidBundleSpecification as e:
1845 repo.ui.debug(str(e) + '\n')
1846 repo.ui.debug(str(e) + '\n')
1846 continue
1847 continue
1847 except error.UnsupportedBundleSpecification as e:
1848 except error.UnsupportedBundleSpecification as e:
1848 repo.ui.debug('filtering %s because unsupported bundle '
1849 repo.ui.debug('filtering %s because unsupported bundle '
1849 'spec: %s\n' % (entry['URL'], str(e)))
1850 'spec: %s\n' % (entry['URL'], str(e)))
1850 continue
1851 continue
1851
1852
1852 if 'REQUIRESNI' in entry and not sslutil.hassni:
1853 if 'REQUIRESNI' in entry and not sslutil.hassni:
1853 repo.ui.debug('filtering %s because SNI not supported\n' %
1854 repo.ui.debug('filtering %s because SNI not supported\n' %
1854 entry['URL'])
1855 entry['URL'])
1855 continue
1856 continue
1856
1857
1857 newentries.append(entry)
1858 newentries.append(entry)
1858
1859
1859 return newentries
1860 return newentries
1860
1861
1861 def sortclonebundleentries(ui, entries):
1862 def sortclonebundleentries(ui, entries):
1862 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1863 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1863 if not prefers:
1864 if not prefers:
1864 return list(entries)
1865 return list(entries)
1865
1866
1866 prefers = [p.split('=', 1) for p in prefers]
1867 prefers = [p.split('=', 1) for p in prefers]
1867
1868
1868 # Our sort function.
1869 # Our sort function.
1869 def compareentry(a, b):
1870 def compareentry(a, b):
1870 for prefkey, prefvalue in prefers:
1871 for prefkey, prefvalue in prefers:
1871 avalue = a.get(prefkey)
1872 avalue = a.get(prefkey)
1872 bvalue = b.get(prefkey)
1873 bvalue = b.get(prefkey)
1873
1874
1874 # Special case for b missing attribute and a matches exactly.
1875 # Special case for b missing attribute and a matches exactly.
1875 if avalue is not None and bvalue is None and avalue == prefvalue:
1876 if avalue is not None and bvalue is None and avalue == prefvalue:
1876 return -1
1877 return -1
1877
1878
1878 # Special case for a missing attribute and b matches exactly.
1879 # Special case for a missing attribute and b matches exactly.
1879 if bvalue is not None and avalue is None and bvalue == prefvalue:
1880 if bvalue is not None and avalue is None and bvalue == prefvalue:
1880 return 1
1881 return 1
1881
1882
1882 # We can't compare unless attribute present on both.
1883 # We can't compare unless attribute present on both.
1883 if avalue is None or bvalue is None:
1884 if avalue is None or bvalue is None:
1884 continue
1885 continue
1885
1886
1886 # Same values should fall back to next attribute.
1887 # Same values should fall back to next attribute.
1887 if avalue == bvalue:
1888 if avalue == bvalue:
1888 continue
1889 continue
1889
1890
1890 # Exact matches come first.
1891 # Exact matches come first.
1891 if avalue == prefvalue:
1892 if avalue == prefvalue:
1892 return -1
1893 return -1
1893 if bvalue == prefvalue:
1894 if bvalue == prefvalue:
1894 return 1
1895 return 1
1895
1896
1896 # Fall back to next attribute.
1897 # Fall back to next attribute.
1897 continue
1898 continue
1898
1899
1899 # If we got here we couldn't sort by attributes and prefers. Fall
1900 # If we got here we couldn't sort by attributes and prefers. Fall
1900 # back to index order.
1901 # back to index order.
1901 return 0
1902 return 0
1902
1903
1903 return sorted(entries, cmp=compareentry)
1904 return sorted(entries, cmp=compareentry)
1904
1905
1905 def trypullbundlefromurl(ui, repo, url):
1906 def trypullbundlefromurl(ui, repo, url):
1906 """Attempt to apply a bundle from a URL."""
1907 """Attempt to apply a bundle from a URL."""
1907 lock = repo.lock()
1908 lock = repo.lock()
1908 try:
1909 try:
1909 tr = repo.transaction('bundleurl')
1910 tr = repo.transaction('bundleurl')
1910 try:
1911 try:
1911 try:
1912 try:
1912 fh = urlmod.open(ui, url)
1913 fh = urlmod.open(ui, url)
1913 cg = readbundle(ui, fh, 'stream')
1914 cg = readbundle(ui, fh, 'stream')
1914
1915
1915 if isinstance(cg, bundle2.unbundle20):
1916 if isinstance(cg, bundle2.unbundle20):
1916 bundle2.processbundle(repo, cg, lambda: tr)
1917 bundle2.processbundle(repo, cg, lambda: tr)
1917 elif isinstance(cg, streamclone.streamcloneapplier):
1918 elif isinstance(cg, streamclone.streamcloneapplier):
1918 cg.apply(repo)
1919 cg.apply(repo)
1919 else:
1920 else:
1920 cg.apply(repo, 'clonebundles', url)
1921 cg.apply(repo, 'clonebundles', url)
1921 tr.close()
1922 tr.close()
1922 return True
1923 return True
1923 except urlerr.httperror as e:
1924 except urlerr.httperror as e:
1924 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1925 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1925 except urlerr.urlerror as e:
1926 except urlerr.urlerror as e:
1926 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1927 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1927
1928
1928 return False
1929 return False
1929 finally:
1930 finally:
1930 tr.release()
1931 tr.release()
1931 finally:
1932 finally:
1932 lock.release()
1933 lock.release()
@@ -1,1117 +1,1117 b''
1 Test exchange of common information using bundle2
1 Test exchange of common information using bundle2
2
2
3
3
4 $ getmainid() {
4 $ getmainid() {
5 > hg -R main log --template '{node}\n' --rev "$1"
5 > hg -R main log --template '{node}\n' --rev "$1"
6 > }
6 > }
7
7
8 enable obsolescence
8 enable obsolescence
9
9
10 $ cp $HGRCPATH $TESTTMP/hgrc.orig
10 $ cp $HGRCPATH $TESTTMP/hgrc.orig
11 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
11 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
12 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
12 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
13 > hg debuglock
13 > hg debuglock
14 > EOF
14 > EOF
15
15
16 $ cat >> $HGRCPATH << EOF
16 $ cat >> $HGRCPATH << EOF
17 > [experimental]
17 > [experimental]
18 > evolution=createmarkers,exchange
18 > evolution=createmarkers,exchange
19 > bundle2-exp=True
19 > bundle2-exp=True
20 > bundle2-output-capture=True
20 > bundle2-output-capture=True
21 > [ui]
21 > [ui]
22 > ssh=python "$TESTDIR/dummyssh"
22 > ssh=python "$TESTDIR/dummyssh"
23 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
23 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
24 > [web]
24 > [web]
25 > push_ssl = false
25 > push_ssl = false
26 > allow_push = *
26 > allow_push = *
27 > [phases]
27 > [phases]
28 > publish=False
28 > publish=False
29 > [hooks]
29 > [hooks]
30 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
30 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
32 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
32 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
33 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
33 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
34 > EOF
34 > EOF
35
35
36 The extension requires a repo (currently unused)
36 The extension requires a repo (currently unused)
37
37
38 $ hg init main
38 $ hg init main
39 $ cd main
39 $ cd main
40 $ touch a
40 $ touch a
41 $ hg add a
41 $ hg add a
42 $ hg commit -m 'a'
42 $ hg commit -m 'a'
43 pre-close-tip:3903775176ed draft
43 pre-close-tip:3903775176ed draft
44 postclose-tip:3903775176ed draft
44 postclose-tip:3903775176ed draft
45 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
45 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
46
46
47 $ hg unbundle $TESTDIR/bundles/rebase.hg
47 $ hg unbundle $TESTDIR/bundles/rebase.hg
48 adding changesets
48 adding changesets
49 adding manifests
49 adding manifests
50 adding file changes
50 adding file changes
51 added 8 changesets with 7 changes to 7 files (+3 heads)
51 added 8 changesets with 7 changes to 7 files (+3 heads)
52 pre-close-tip:02de42196ebe draft
52 pre-close-tip:02de42196ebe draft
53 postclose-tip:02de42196ebe draft
53 postclose-tip:02de42196ebe draft
54 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
54 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
55 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
55 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
56 (run 'hg heads' to see heads, 'hg merge' to merge)
56 (run 'hg heads' to see heads, 'hg merge' to merge)
57
57
58 $ cd ..
58 $ cd ..
59
59
60 Real world exchange
60 Real world exchange
61 =====================
61 =====================
62
62
63 Add more obsolescence information
63 Add more obsolescence information
64
64
65 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
65 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
66 pre-close-tip:02de42196ebe draft
66 pre-close-tip:02de42196ebe draft
67 postclose-tip:02de42196ebe draft
67 postclose-tip:02de42196ebe draft
68 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
68 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
69 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
69 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
70 pre-close-tip:02de42196ebe draft
70 pre-close-tip:02de42196ebe draft
71 postclose-tip:02de42196ebe draft
71 postclose-tip:02de42196ebe draft
72 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
72 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
73
73
74 clone --pull
74 clone --pull
75
75
76 $ hg -R main phase --public cd010b8cd998
76 $ hg -R main phase --public cd010b8cd998
77 pre-close-tip:02de42196ebe draft
77 pre-close-tip:02de42196ebe draft
78 postclose-tip:02de42196ebe draft
78 postclose-tip:02de42196ebe draft
79 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
79 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
80 $ hg clone main other --pull --rev 9520eea781bc
80 $ hg clone main other --pull --rev 9520eea781bc
81 adding changesets
81 adding changesets
82 adding manifests
82 adding manifests
83 adding file changes
83 adding file changes
84 added 2 changesets with 2 changes to 2 files
84 added 2 changesets with 2 changes to 2 files
85 1 new obsolescence markers
85 1 new obsolescence markers
86 pre-close-tip:9520eea781bc draft
86 pre-close-tip:9520eea781bc draft
87 postclose-tip:9520eea781bc draft
87 postclose-tip:9520eea781bc draft
88 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
88 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
89 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
89 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
90 updating to branch default
90 updating to branch default
91 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 $ hg -R other log -G
92 $ hg -R other log -G
93 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
93 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
94 |
94 |
95 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
95 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
96
96
97 $ hg -R other debugobsolete
97 $ hg -R other debugobsolete
98 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
98 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
99
99
100 pull
100 pull
101
101
102 $ hg -R main phase --public 9520eea781bc
102 $ hg -R main phase --public 9520eea781bc
103 pre-close-tip:02de42196ebe draft
103 pre-close-tip:02de42196ebe draft
104 postclose-tip:02de42196ebe draft
104 postclose-tip:02de42196ebe draft
105 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
105 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
106 $ hg -R other pull -r 24b6387c8c8c
106 $ hg -R other pull -r 24b6387c8c8c
107 pulling from $TESTTMP/main (glob)
107 pulling from $TESTTMP/main (glob)
108 searching for changes
108 searching for changes
109 adding changesets
109 adding changesets
110 adding manifests
110 adding manifests
111 adding file changes
111 adding file changes
112 added 1 changesets with 1 changes to 1 files (+1 heads)
112 added 1 changesets with 1 changes to 1 files (+1 heads)
113 1 new obsolescence markers
113 1 new obsolescence markers
114 pre-close-tip:24b6387c8c8c draft
114 pre-close-tip:24b6387c8c8c draft
115 postclose-tip:24b6387c8c8c draft
115 postclose-tip:24b6387c8c8c draft
116 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
116 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
117 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
117 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
118 (run 'hg heads' to see heads, 'hg merge' to merge)
118 (run 'hg heads' to see heads, 'hg merge' to merge)
119 $ hg -R other log -G
119 $ hg -R other log -G
120 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
121 |
121 |
122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
123 |/
123 |/
124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
125
125
126 $ hg -R other debugobsolete
126 $ hg -R other debugobsolete
127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
129
129
130 pull empty (with phase movement)
130 pull empty (with phase movement)
131
131
132 $ hg -R main phase --public 24b6387c8c8c
132 $ hg -R main phase --public 24b6387c8c8c
133 pre-close-tip:02de42196ebe draft
133 pre-close-tip:02de42196ebe draft
134 postclose-tip:02de42196ebe draft
134 postclose-tip:02de42196ebe draft
135 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
135 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
136 $ hg -R other pull -r 24b6387c8c8c
136 $ hg -R other pull -r 24b6387c8c8c
137 pulling from $TESTTMP/main (glob)
137 pulling from $TESTTMP/main (glob)
138 no changes found
138 no changes found
139 pre-close-tip:24b6387c8c8c public
139 pre-close-tip:24b6387c8c8c public
140 postclose-tip:24b6387c8c8c public
140 postclose-tip:24b6387c8c8c public
141 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
141 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
142 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
142 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
143 $ hg -R other log -G
143 $ hg -R other log -G
144 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
144 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
145 |
145 |
146 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
146 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
147 |/
147 |/
148 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
148 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
149
149
150 $ hg -R other debugobsolete
150 $ hg -R other debugobsolete
151 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
153
153
154 pull empty
154 pull empty
155
155
156 $ hg -R other pull -r 24b6387c8c8c
156 $ hg -R other pull -r 24b6387c8c8c
157 pulling from $TESTTMP/main (glob)
157 pulling from $TESTTMP/main (glob)
158 no changes found
158 no changes found
159 pre-close-tip:24b6387c8c8c public
159 pre-close-tip:24b6387c8c8c public
160 postclose-tip:24b6387c8c8c public
160 postclose-tip:24b6387c8c8c public
161 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
161 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
162 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
162 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
163 $ hg -R other log -G
163 $ hg -R other log -G
164 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
164 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
165 |
165 |
166 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
166 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
167 |/
167 |/
168 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
168 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
169
169
170 $ hg -R other debugobsolete
170 $ hg -R other debugobsolete
171 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
173
173
174 add extra data to test their exchange during push
174 add extra data to test their exchange during push
175
175
176 $ hg -R main bookmark --rev eea13746799a book_eea1
176 $ hg -R main bookmark --rev eea13746799a book_eea1
177 pre-close-tip:02de42196ebe draft
177 pre-close-tip:02de42196ebe draft
178 postclose-tip:02de42196ebe draft
178 postclose-tip:02de42196ebe draft
179 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
179 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
180 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
180 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
181 pre-close-tip:02de42196ebe draft
181 pre-close-tip:02de42196ebe draft
182 postclose-tip:02de42196ebe draft
182 postclose-tip:02de42196ebe draft
183 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
183 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
184 $ hg -R main bookmark --rev 02de42196ebe book_02de
184 $ hg -R main bookmark --rev 02de42196ebe book_02de
185 pre-close-tip:02de42196ebe draft book_02de
185 pre-close-tip:02de42196ebe draft book_02de
186 postclose-tip:02de42196ebe draft book_02de
186 postclose-tip:02de42196ebe draft book_02de
187 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
187 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
188 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
188 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
189 pre-close-tip:02de42196ebe draft book_02de
189 pre-close-tip:02de42196ebe draft book_02de
190 postclose-tip:02de42196ebe draft book_02de
190 postclose-tip:02de42196ebe draft book_02de
191 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
191 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
192 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
192 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
193 pre-close-tip:02de42196ebe draft book_02de
193 pre-close-tip:02de42196ebe draft book_02de
194 postclose-tip:02de42196ebe draft book_02de
194 postclose-tip:02de42196ebe draft book_02de
195 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
195 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
196 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
196 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
197 pre-close-tip:02de42196ebe draft book_02de
197 pre-close-tip:02de42196ebe draft book_02de
198 postclose-tip:02de42196ebe draft book_02de
198 postclose-tip:02de42196ebe draft book_02de
199 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
199 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
200 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
200 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
201 pre-close-tip:02de42196ebe draft book_02de
201 pre-close-tip:02de42196ebe draft book_02de
202 postclose-tip:02de42196ebe draft book_02de
202 postclose-tip:02de42196ebe draft book_02de
203 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
203 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
204 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
204 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
205 pre-close-tip:02de42196ebe draft book_02de
205 pre-close-tip:02de42196ebe draft book_02de
206 postclose-tip:02de42196ebe draft book_02de
206 postclose-tip:02de42196ebe draft book_02de
207 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
207 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
208 $ hg -R main bookmark --rev 32af7686d403 book_32af
208 $ hg -R main bookmark --rev 32af7686d403 book_32af
209 pre-close-tip:02de42196ebe draft book_02de
209 pre-close-tip:02de42196ebe draft book_02de
210 postclose-tip:02de42196ebe draft book_02de
210 postclose-tip:02de42196ebe draft book_02de
211 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
211 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
212 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
212 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
213 pre-close-tip:02de42196ebe draft book_02de
213 pre-close-tip:02de42196ebe draft book_02de
214 postclose-tip:02de42196ebe draft book_02de
214 postclose-tip:02de42196ebe draft book_02de
215 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
215 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
216
216
217 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
217 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
218 pre-close-tip:24b6387c8c8c public
218 pre-close-tip:24b6387c8c8c public
219 postclose-tip:24b6387c8c8c public
219 postclose-tip:24b6387c8c8c public
220 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
220 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
221 $ hg -R other bookmark --rev cd010b8cd998 book_02de
221 $ hg -R other bookmark --rev cd010b8cd998 book_02de
222 pre-close-tip:24b6387c8c8c public
222 pre-close-tip:24b6387c8c8c public
223 postclose-tip:24b6387c8c8c public
223 postclose-tip:24b6387c8c8c public
224 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
224 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
225 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
225 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
226 pre-close-tip:24b6387c8c8c public
226 pre-close-tip:24b6387c8c8c public
227 postclose-tip:24b6387c8c8c public
227 postclose-tip:24b6387c8c8c public
228 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
228 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
229 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
229 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
230 pre-close-tip:24b6387c8c8c public
230 pre-close-tip:24b6387c8c8c public
231 postclose-tip:24b6387c8c8c public
231 postclose-tip:24b6387c8c8c public
232 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
232 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
233 $ hg -R other bookmark --rev cd010b8cd998 book_32af
233 $ hg -R other bookmark --rev cd010b8cd998 book_32af
234 pre-close-tip:24b6387c8c8c public
234 pre-close-tip:24b6387c8c8c public
235 postclose-tip:24b6387c8c8c public
235 postclose-tip:24b6387c8c8c public
236 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
236 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
237
237
238 $ hg -R main phase --public eea13746799a
238 $ hg -R main phase --public eea13746799a
239 pre-close-tip:02de42196ebe draft book_02de
239 pre-close-tip:02de42196ebe draft book_02de
240 postclose-tip:02de42196ebe draft book_02de
240 postclose-tip:02de42196ebe draft book_02de
241 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
241 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
242
242
243 push
243 push
244 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
244 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
245 pushing to other
245 pushing to other
246 searching for changes
246 searching for changes
247 remote: adding changesets
247 remote: adding changesets
248 remote: adding manifests
248 remote: adding manifests
249 remote: adding file changes
249 remote: adding file changes
250 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
250 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
251 remote: 1 new obsolescence markers
251 remote: 1 new obsolescence markers
252 remote: pre-close-tip:eea13746799a public book_eea1
252 remote: pre-close-tip:eea13746799a public book_eea1
253 remote: pushkey: lock state after "phases"
253 remote: pushkey: lock state after "phases"
254 remote: lock: free
254 remote: lock: free
255 remote: wlock: free
255 remote: wlock: free
256 remote: pushkey: lock state after "bookmarks"
256 remote: pushkey: lock state after "bookmarks"
257 remote: lock: free
257 remote: lock: free
258 remote: wlock: free
258 remote: wlock: free
259 remote: postclose-tip:eea13746799a public book_eea1
259 remote: postclose-tip:eea13746799a public book_eea1
260 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
260 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/other (glob)
261 updating bookmark book_eea1
261 updating bookmark book_eea1
262 pre-close-tip:02de42196ebe draft book_02de
262 pre-close-tip:02de42196ebe draft book_02de
263 postclose-tip:02de42196ebe draft book_02de
263 postclose-tip:02de42196ebe draft book_02de
264 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
264 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
265 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
265 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
266 $ hg -R other log -G
266 $ hg -R other log -G
267 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
267 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
268 |\
268 |\
269 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
269 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
270 | |
270 | |
271 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
271 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
272 |/
272 |/
273 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
273 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
274
274
275 $ hg -R other debugobsolete
275 $ hg -R other debugobsolete
276 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
276 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
278 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
278 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
279
279
280 pull over ssh
280 pull over ssh
281
281
282 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
282 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
283 pulling from ssh://user@dummy/main
283 pulling from ssh://user@dummy/main
284 searching for changes
284 searching for changes
285 adding changesets
285 adding changesets
286 adding manifests
286 adding manifests
287 adding file changes
287 adding file changes
288 added 1 changesets with 1 changes to 1 files (+1 heads)
288 added 1 changesets with 1 changes to 1 files (+1 heads)
289 1 new obsolescence markers
289 1 new obsolescence markers
290 updating bookmark book_02de
290 updating bookmark book_02de
291 pre-close-tip:02de42196ebe draft book_02de
291 pre-close-tip:02de42196ebe draft book_02de
292 postclose-tip:02de42196ebe draft book_02de
292 postclose-tip:02de42196ebe draft book_02de
293 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
293 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
294 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
294 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
295 (run 'hg heads' to see heads, 'hg merge' to merge)
295 (run 'hg heads' to see heads, 'hg merge' to merge)
296 $ hg -R other debugobsolete
296 $ hg -R other debugobsolete
297 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
301
301
302 pull over http
302 pull over http
303
303
304 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
304 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
305 $ cat main.pid >> $DAEMON_PIDS
305 $ cat main.pid >> $DAEMON_PIDS
306
306
307 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
307 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
308 pulling from http://localhost:$HGPORT/
308 pulling from http://localhost:$HGPORT/
309 searching for changes
309 searching for changes
310 adding changesets
310 adding changesets
311 adding manifests
311 adding manifests
312 adding file changes
312 adding file changes
313 added 1 changesets with 1 changes to 1 files (+1 heads)
313 added 1 changesets with 1 changes to 1 files (+1 heads)
314 1 new obsolescence markers
314 1 new obsolescence markers
315 updating bookmark book_42cc
315 updating bookmark book_42cc
316 pre-close-tip:42ccdea3bb16 draft book_42cc
316 pre-close-tip:42ccdea3bb16 draft book_42cc
317 postclose-tip:42ccdea3bb16 draft book_42cc
317 postclose-tip:42ccdea3bb16 draft book_42cc
318 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
318 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
319 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
319 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
320 (run 'hg heads .' to see heads, 'hg merge' to merge)
320 (run 'hg heads .' to see heads, 'hg merge' to merge)
321 $ cat main-error.log
321 $ cat main-error.log
322 $ hg -R other debugobsolete
322 $ hg -R other debugobsolete
323 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
323 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
327 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
327 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
328
328
329 push over ssh
329 push over ssh
330
330
331 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
331 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
332 pushing to ssh://user@dummy/other
332 pushing to ssh://user@dummy/other
333 searching for changes
333 searching for changes
334 remote: adding changesets
334 remote: adding changesets
335 remote: adding manifests
335 remote: adding manifests
336 remote: adding file changes
336 remote: adding file changes
337 remote: added 1 changesets with 1 changes to 1 files
337 remote: added 1 changesets with 1 changes to 1 files
338 remote: 1 new obsolescence markers
338 remote: 1 new obsolescence markers
339 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
339 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
340 remote: pushkey: lock state after "bookmarks"
340 remote: pushkey: lock state after "bookmarks"
341 remote: lock: free
341 remote: lock: free
342 remote: wlock: free
342 remote: wlock: free
343 remote: postclose-tip:5fddd98957c8 draft book_5fdd
343 remote: postclose-tip:5fddd98957c8 draft book_5fdd
344 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
344 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
345 updating bookmark book_5fdd
345 updating bookmark book_5fdd
346 pre-close-tip:02de42196ebe draft book_02de
346 pre-close-tip:02de42196ebe draft book_02de
347 postclose-tip:02de42196ebe draft book_02de
347 postclose-tip:02de42196ebe draft book_02de
348 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
348 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
349 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
349 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
350 $ hg -R other log -G
350 $ hg -R other log -G
351 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
351 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
352 |
352 |
353 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
353 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
354 |
354 |
355 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
355 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
356 | |
356 | |
357 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
357 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
358 | |/|
358 | |/|
359 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
359 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
360 |/ /
360 |/ /
361 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
361 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
362 |/
362 |/
363 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
363 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
364
364
365 $ hg -R other debugobsolete
365 $ hg -R other debugobsolete
366 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
366 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
371 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
371 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
372
372
373 push over http
373 push over http
374
374
375 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
375 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
376 $ cat other.pid >> $DAEMON_PIDS
376 $ cat other.pid >> $DAEMON_PIDS
377
377
378 $ hg -R main phase --public 32af7686d403
378 $ hg -R main phase --public 32af7686d403
379 pre-close-tip:02de42196ebe draft book_02de
379 pre-close-tip:02de42196ebe draft book_02de
380 postclose-tip:02de42196ebe draft book_02de
380 postclose-tip:02de42196ebe draft book_02de
381 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
381 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
382 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
382 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
383 pushing to http://localhost:$HGPORT2/
383 pushing to http://localhost:$HGPORT2/
384 searching for changes
384 searching for changes
385 remote: adding changesets
385 remote: adding changesets
386 remote: adding manifests
386 remote: adding manifests
387 remote: adding file changes
387 remote: adding file changes
388 remote: added 1 changesets with 1 changes to 1 files
388 remote: added 1 changesets with 1 changes to 1 files
389 remote: 1 new obsolescence markers
389 remote: 1 new obsolescence markers
390 remote: pre-close-tip:32af7686d403 public book_32af
390 remote: pre-close-tip:32af7686d403 public book_32af
391 remote: pushkey: lock state after "phases"
391 remote: pushkey: lock state after "phases"
392 remote: lock: free
392 remote: lock: free
393 remote: wlock: free
393 remote: wlock: free
394 remote: pushkey: lock state after "bookmarks"
394 remote: pushkey: lock state after "bookmarks"
395 remote: lock: free
395 remote: lock: free
396 remote: wlock: free
396 remote: wlock: free
397 remote: postclose-tip:32af7686d403 public book_32af
397 remote: postclose-tip:32af7686d403 public book_32af
398 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
398 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
399 updating bookmark book_32af
399 updating bookmark book_32af
400 pre-close-tip:02de42196ebe draft book_02de
400 pre-close-tip:02de42196ebe draft book_02de
401 postclose-tip:02de42196ebe draft book_02de
401 postclose-tip:02de42196ebe draft book_02de
402 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
402 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
403 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
403 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
404 $ cat other-error.log
404 $ cat other-error.log
405
405
406 Check final content.
406 Check final content.
407
407
408 $ hg -R other log -G
408 $ hg -R other log -G
409 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
409 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
410 |
410 |
411 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
411 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
412 |
412 |
413 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
413 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
414 |
414 |
415 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
415 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
416 | |
416 | |
417 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
417 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
418 | |/|
418 | |/|
419 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
419 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
420 |/ /
420 |/ /
421 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
421 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
422 |/
422 |/
423 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
423 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
424
424
425 $ hg -R other debugobsolete
425 $ hg -R other debugobsolete
426 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
426 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
433
433
434 (check that no 'pending' files remain)
434 (check that no 'pending' files remain)
435
435
436 $ ls -1 other/.hg/bookmarks*
436 $ ls -1 other/.hg/bookmarks*
437 other/.hg/bookmarks
437 other/.hg/bookmarks
438 $ ls -1 other/.hg/store/phaseroots*
438 $ ls -1 other/.hg/store/phaseroots*
439 other/.hg/store/phaseroots
439 other/.hg/store/phaseroots
440 $ ls -1 other/.hg/store/00changelog.i*
440 $ ls -1 other/.hg/store/00changelog.i*
441 other/.hg/store/00changelog.i
441 other/.hg/store/00changelog.i
442
442
443 Error Handling
443 Error Handling
444 ==============
444 ==============
445
445
446 Check that errors are properly returned to the client during push.
446 Check that errors are properly returned to the client during push.
447
447
448 Setting up
448 Setting up
449
449
450 $ cat > failpush.py << EOF
450 $ cat > failpush.py << EOF
451 > """A small extension that makes push fails when using bundle2
451 > """A small extension that makes push fails when using bundle2
452 >
452 >
453 > used to test error handling in bundle2
453 > used to test error handling in bundle2
454 > """
454 > """
455 >
455 >
456 > from mercurial import error
456 > from mercurial import error
457 > from mercurial import bundle2
457 > from mercurial import bundle2
458 > from mercurial import exchange
458 > from mercurial import exchange
459 > from mercurial import extensions
459 > from mercurial import extensions
460 >
460 >
461 > def _pushbundle2failpart(pushop, bundler):
461 > def _pushbundle2failpart(pushop, bundler):
462 > reason = pushop.ui.config('failpush', 'reason', None)
462 > reason = pushop.ui.config('failpush', 'reason', None)
463 > part = None
463 > part = None
464 > if reason == 'abort':
464 > if reason == 'abort':
465 > bundler.newpart('test:abort')
465 > bundler.newpart('test:abort')
466 > if reason == 'unknown':
466 > if reason == 'unknown':
467 > bundler.newpart('test:unknown')
467 > bundler.newpart('test:unknown')
468 > if reason == 'race':
468 > if reason == 'race':
469 > # 20 Bytes of crap
469 > # 20 Bytes of crap
470 > bundler.newpart('check:heads', data='01234567890123456789')
470 > bundler.newpart('check:heads', data='01234567890123456789')
471 >
471 >
472 > @bundle2.parthandler("test:abort")
472 > @bundle2.parthandler("test:abort")
473 > def handleabort(op, part):
473 > def handleabort(op, part):
474 > raise error.Abort('Abandon ship!', hint="don't panic")
474 > raise error.Abort('Abandon ship!', hint="don't panic")
475 >
475 >
476 > def uisetup(ui):
476 > def uisetup(ui):
477 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
477 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
478 > exchange.b2partsgenorder.insert(0, 'failpart')
478 > exchange.b2partsgenorder.insert(0, 'failpart')
479 >
479 >
480 > EOF
480 > EOF
481
481
482 $ cd main
482 $ cd main
483 $ hg up tip
483 $ hg up tip
484 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
484 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
485 $ echo 'I' > I
485 $ echo 'I' > I
486 $ hg add I
486 $ hg add I
487 $ hg ci -m 'I'
487 $ hg ci -m 'I'
488 pre-close-tip:e7ec4e813ba6 draft
488 pre-close-tip:e7ec4e813ba6 draft
489 postclose-tip:e7ec4e813ba6 draft
489 postclose-tip:e7ec4e813ba6 draft
490 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
490 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
491 $ hg id
491 $ hg id
492 e7ec4e813ba6 tip
492 e7ec4e813ba6 tip
493 $ cd ..
493 $ cd ..
494
494
495 $ cat << EOF >> $HGRCPATH
495 $ cat << EOF >> $HGRCPATH
496 > [extensions]
496 > [extensions]
497 > failpush=$TESTTMP/failpush.py
497 > failpush=$TESTTMP/failpush.py
498 > EOF
498 > EOF
499
499
500 $ killdaemons.py
500 $ killdaemons.py
501 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
501 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
502 $ cat other.pid >> $DAEMON_PIDS
502 $ cat other.pid >> $DAEMON_PIDS
503
503
504 Doing the actual push: Abort error
504 Doing the actual push: Abort error
505
505
506 $ cat << EOF >> $HGRCPATH
506 $ cat << EOF >> $HGRCPATH
507 > [failpush]
507 > [failpush]
508 > reason = abort
508 > reason = abort
509 > EOF
509 > EOF
510
510
511 $ hg -R main push other -r e7ec4e813ba6
511 $ hg -R main push other -r e7ec4e813ba6
512 pushing to other
512 pushing to other
513 searching for changes
513 searching for changes
514 abort: Abandon ship!
514 abort: Abandon ship!
515 (don't panic)
515 (don't panic)
516 [255]
516 [255]
517
517
518 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
518 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
519 pushing to ssh://user@dummy/other
519 pushing to ssh://user@dummy/other
520 searching for changes
520 searching for changes
521 remote: Abandon ship!
521 remote: Abandon ship!
522 abort: push failed on remote
522 abort: push failed on remote
523 (don't panic)
523 (don't panic)
524 [255]
524 [255]
525
525
526 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
526 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
527 pushing to http://localhost:$HGPORT2/
527 pushing to http://localhost:$HGPORT2/
528 searching for changes
528 searching for changes
529 remote: Abandon ship!
529 remote: Abandon ship!
530 abort: push failed on remote
530 abort: push failed on remote
531 (don't panic)
531 (don't panic)
532 [255]
532 [255]
533
533
534
534
535 Doing the actual push: unknown mandatory parts
535 Doing the actual push: unknown mandatory parts
536
536
537 $ cat << EOF >> $HGRCPATH
537 $ cat << EOF >> $HGRCPATH
538 > [failpush]
538 > [failpush]
539 > reason = unknown
539 > reason = unknown
540 > EOF
540 > EOF
541
541
542 $ hg -R main push other -r e7ec4e813ba6
542 $ hg -R main push other -r e7ec4e813ba6
543 pushing to other
543 pushing to other
544 searching for changes
544 searching for changes
545 abort: missing support for test:unknown
545 abort: missing support for test:unknown
546 [255]
546 [255]
547
547
548 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
548 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
549 pushing to ssh://user@dummy/other
549 pushing to ssh://user@dummy/other
550 searching for changes
550 searching for changes
551 abort: missing support for test:unknown
551 abort: missing support for test:unknown
552 [255]
552 [255]
553
553
554 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
554 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
555 pushing to http://localhost:$HGPORT2/
555 pushing to http://localhost:$HGPORT2/
556 searching for changes
556 searching for changes
557 abort: missing support for test:unknown
557 abort: missing support for test:unknown
558 [255]
558 [255]
559
559
560 Doing the actual push: race
560 Doing the actual push: race
561
561
562 $ cat << EOF >> $HGRCPATH
562 $ cat << EOF >> $HGRCPATH
563 > [failpush]
563 > [failpush]
564 > reason = race
564 > reason = race
565 > EOF
565 > EOF
566
566
567 $ hg -R main push other -r e7ec4e813ba6
567 $ hg -R main push other -r e7ec4e813ba6
568 pushing to other
568 pushing to other
569 searching for changes
569 searching for changes
570 abort: push failed:
570 abort: push failed:
571 'repository changed while pushing - please try again'
571 'repository changed while pushing - please try again'
572 [255]
572 [255]
573
573
574 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
574 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
575 pushing to ssh://user@dummy/other
575 pushing to ssh://user@dummy/other
576 searching for changes
576 searching for changes
577 abort: push failed:
577 abort: push failed:
578 'repository changed while pushing - please try again'
578 'repository changed while pushing - please try again'
579 [255]
579 [255]
580
580
581 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
581 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
582 pushing to http://localhost:$HGPORT2/
582 pushing to http://localhost:$HGPORT2/
583 searching for changes
583 searching for changes
584 abort: push failed:
584 abort: push failed:
585 'repository changed while pushing - please try again'
585 'repository changed while pushing - please try again'
586 [255]
586 [255]
587
587
588 Doing the actual push: hook abort
588 Doing the actual push: hook abort
589
589
590 $ cat << EOF >> $HGRCPATH
590 $ cat << EOF >> $HGRCPATH
591 > [failpush]
591 > [failpush]
592 > reason =
592 > reason =
593 > [hooks]
593 > [hooks]
594 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
594 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
595 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
595 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
596 > EOF
596 > EOF
597
597
598 $ killdaemons.py
598 $ killdaemons.py
599 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
599 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
600 $ cat other.pid >> $DAEMON_PIDS
600 $ cat other.pid >> $DAEMON_PIDS
601
601
602 $ hg -R main push other -r e7ec4e813ba6
602 $ hg -R main push other -r e7ec4e813ba6
603 pushing to other
603 pushing to other
604 searching for changes
604 searching for changes
605 remote: adding changesets
605 remote: adding changesets
606 remote: adding manifests
606 remote: adding manifests
607 remote: adding file changes
607 remote: adding file changes
608 remote: added 1 changesets with 1 changes to 1 files
608 remote: added 1 changesets with 1 changes to 1 files
609 remote: pre-close-tip:e7ec4e813ba6 draft
609 remote: pre-close-tip:e7ec4e813ba6 draft
610 remote: You shall not pass!
610 remote: You shall not pass!
611 remote: transaction abort!
611 remote: transaction abort!
612 remote: Cleaning up the mess...
612 remote: Cleaning up the mess...
613 remote: rollback completed
613 remote: rollback completed
614 abort: pretxnclose.failpush hook exited with status 1
614 abort: pretxnclose.failpush hook exited with status 1
615 [255]
615 [255]
616
616
617 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
617 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
618 pushing to ssh://user@dummy/other
618 pushing to ssh://user@dummy/other
619 searching for changes
619 searching for changes
620 remote: adding changesets
620 remote: adding changesets
621 remote: adding manifests
621 remote: adding manifests
622 remote: adding file changes
622 remote: adding file changes
623 remote: added 1 changesets with 1 changes to 1 files
623 remote: added 1 changesets with 1 changes to 1 files
624 remote: pre-close-tip:e7ec4e813ba6 draft
624 remote: pre-close-tip:e7ec4e813ba6 draft
625 remote: You shall not pass!
625 remote: You shall not pass!
626 remote: transaction abort!
626 remote: transaction abort!
627 remote: Cleaning up the mess...
627 remote: Cleaning up the mess...
628 remote: rollback completed
628 remote: rollback completed
629 remote: pretxnclose.failpush hook exited with status 1
629 remote: pretxnclose.failpush hook exited with status 1
630 abort: push failed on remote
630 abort: push failed on remote
631 [255]
631 [255]
632
632
633 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
633 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
634 pushing to http://localhost:$HGPORT2/
634 pushing to http://localhost:$HGPORT2/
635 searching for changes
635 searching for changes
636 remote: adding changesets
636 remote: adding changesets
637 remote: adding manifests
637 remote: adding manifests
638 remote: adding file changes
638 remote: adding file changes
639 remote: added 1 changesets with 1 changes to 1 files
639 remote: added 1 changesets with 1 changes to 1 files
640 remote: pre-close-tip:e7ec4e813ba6 draft
640 remote: pre-close-tip:e7ec4e813ba6 draft
641 remote: You shall not pass!
641 remote: You shall not pass!
642 remote: transaction abort!
642 remote: transaction abort!
643 remote: Cleaning up the mess...
643 remote: Cleaning up the mess...
644 remote: rollback completed
644 remote: rollback completed
645 remote: pretxnclose.failpush hook exited with status 1
645 remote: pretxnclose.failpush hook exited with status 1
646 abort: push failed on remote
646 abort: push failed on remote
647 [255]
647 [255]
648
648
649 (check that no 'pending' files remain)
649 (check that no 'pending' files remain)
650
650
651 $ ls -1 other/.hg/bookmarks*
651 $ ls -1 other/.hg/bookmarks*
652 other/.hg/bookmarks
652 other/.hg/bookmarks
653 $ ls -1 other/.hg/store/phaseroots*
653 $ ls -1 other/.hg/store/phaseroots*
654 other/.hg/store/phaseroots
654 other/.hg/store/phaseroots
655 $ ls -1 other/.hg/store/00changelog.i*
655 $ ls -1 other/.hg/store/00changelog.i*
656 other/.hg/store/00changelog.i
656 other/.hg/store/00changelog.i
657
657
658 Check error from hook during the unbundling process itself
658 Check error from hook during the unbundling process itself
659
659
660 $ cat << EOF >> $HGRCPATH
660 $ cat << EOF >> $HGRCPATH
661 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
661 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
662 > EOF
662 > EOF
663 $ killdaemons.py # reload http config
663 $ killdaemons.py # reload http config
664 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
664 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
665 $ cat other.pid >> $DAEMON_PIDS
665 $ cat other.pid >> $DAEMON_PIDS
666
666
667 $ hg -R main push other -r e7ec4e813ba6
667 $ hg -R main push other -r e7ec4e813ba6
668 pushing to other
668 pushing to other
669 searching for changes
669 searching for changes
670 remote: adding changesets
670 remote: adding changesets
671 remote: adding manifests
671 remote: adding manifests
672 remote: adding file changes
672 remote: adding file changes
673 remote: added 1 changesets with 1 changes to 1 files
673 remote: added 1 changesets with 1 changes to 1 files
674 remote: Fail early!
674 remote: Fail early!
675 remote: transaction abort!
675 remote: transaction abort!
676 remote: Cleaning up the mess...
676 remote: Cleaning up the mess...
677 remote: rollback completed
677 remote: rollback completed
678 abort: pretxnchangegroup hook exited with status 1
678 abort: pretxnchangegroup hook exited with status 1
679 [255]
679 [255]
680 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
680 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
681 pushing to ssh://user@dummy/other
681 pushing to ssh://user@dummy/other
682 searching for changes
682 searching for changes
683 remote: adding changesets
683 remote: adding changesets
684 remote: adding manifests
684 remote: adding manifests
685 remote: adding file changes
685 remote: adding file changes
686 remote: added 1 changesets with 1 changes to 1 files
686 remote: added 1 changesets with 1 changes to 1 files
687 remote: Fail early!
687 remote: Fail early!
688 remote: transaction abort!
688 remote: transaction abort!
689 remote: Cleaning up the mess...
689 remote: Cleaning up the mess...
690 remote: rollback completed
690 remote: rollback completed
691 remote: pretxnchangegroup hook exited with status 1
691 remote: pretxnchangegroup hook exited with status 1
692 abort: push failed on remote
692 abort: push failed on remote
693 [255]
693 [255]
694 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
694 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
695 pushing to http://localhost:$HGPORT2/
695 pushing to http://localhost:$HGPORT2/
696 searching for changes
696 searching for changes
697 remote: adding changesets
697 remote: adding changesets
698 remote: adding manifests
698 remote: adding manifests
699 remote: adding file changes
699 remote: adding file changes
700 remote: added 1 changesets with 1 changes to 1 files
700 remote: added 1 changesets with 1 changes to 1 files
701 remote: Fail early!
701 remote: Fail early!
702 remote: transaction abort!
702 remote: transaction abort!
703 remote: Cleaning up the mess...
703 remote: Cleaning up the mess...
704 remote: rollback completed
704 remote: rollback completed
705 remote: pretxnchangegroup hook exited with status 1
705 remote: pretxnchangegroup hook exited with status 1
706 abort: push failed on remote
706 abort: push failed on remote
707 [255]
707 [255]
708
708
709 Check output capture control.
709 Check output capture control.
710
710
711 (should be still forced for http, disabled for local and ssh)
711 (should be still forced for http, disabled for local and ssh)
712
712
713 $ cat >> $HGRCPATH << EOF
713 $ cat >> $HGRCPATH << EOF
714 > [experimental]
714 > [experimental]
715 > bundle2-output-capture=False
715 > bundle2-output-capture=False
716 > EOF
716 > EOF
717
717
718 $ hg -R main push other -r e7ec4e813ba6
718 $ hg -R main push other -r e7ec4e813ba6
719 pushing to other
719 pushing to other
720 searching for changes
720 searching for changes
721 adding changesets
721 adding changesets
722 adding manifests
722 adding manifests
723 adding file changes
723 adding file changes
724 added 1 changesets with 1 changes to 1 files
724 added 1 changesets with 1 changes to 1 files
725 Fail early!
725 Fail early!
726 transaction abort!
726 transaction abort!
727 Cleaning up the mess...
727 Cleaning up the mess...
728 rollback completed
728 rollback completed
729 abort: pretxnchangegroup hook exited with status 1
729 abort: pretxnchangegroup hook exited with status 1
730 [255]
730 [255]
731 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
731 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
732 pushing to ssh://user@dummy/other
732 pushing to ssh://user@dummy/other
733 searching for changes
733 searching for changes
734 remote: adding changesets
734 remote: adding changesets
735 remote: adding manifests
735 remote: adding manifests
736 remote: adding file changes
736 remote: adding file changes
737 remote: added 1 changesets with 1 changes to 1 files
737 remote: added 1 changesets with 1 changes to 1 files
738 remote: Fail early!
738 remote: Fail early!
739 remote: transaction abort!
739 remote: transaction abort!
740 remote: Cleaning up the mess...
740 remote: Cleaning up the mess...
741 remote: rollback completed
741 remote: rollback completed
742 remote: pretxnchangegroup hook exited with status 1
742 remote: pretxnchangegroup hook exited with status 1
743 abort: push failed on remote
743 abort: push failed on remote
744 [255]
744 [255]
745 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
745 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
746 pushing to http://localhost:$HGPORT2/
746 pushing to http://localhost:$HGPORT2/
747 searching for changes
747 searching for changes
748 remote: adding changesets
748 remote: adding changesets
749 remote: adding manifests
749 remote: adding manifests
750 remote: adding file changes
750 remote: adding file changes
751 remote: added 1 changesets with 1 changes to 1 files
751 remote: added 1 changesets with 1 changes to 1 files
752 remote: Fail early!
752 remote: Fail early!
753 remote: transaction abort!
753 remote: transaction abort!
754 remote: Cleaning up the mess...
754 remote: Cleaning up the mess...
755 remote: rollback completed
755 remote: rollback completed
756 remote: pretxnchangegroup hook exited with status 1
756 remote: pretxnchangegroup hook exited with status 1
757 abort: push failed on remote
757 abort: push failed on remote
758 [255]
758 [255]
759
759
760 Check abort from mandatory pushkey
760 Check abort from mandatory pushkey
761
761
762 $ cat > mandatorypart.py << EOF
762 $ cat > mandatorypart.py << EOF
763 > from mercurial import exchange
763 > from mercurial import exchange
764 > from mercurial import pushkey
764 > from mercurial import pushkey
765 > from mercurial import node
765 > from mercurial import node
766 > from mercurial import error
766 > from mercurial import error
767 > @exchange.b2partsgenerator('failingpuskey')
767 > @exchange.b2partsgenerator('failingpuskey')
768 > def addfailingpushey(pushop, bundler):
768 > def addfailingpushey(pushop, bundler):
769 > enc = pushkey.encode
769 > enc = pushkey.encode
770 > part = bundler.newpart('pushkey')
770 > part = bundler.newpart('pushkey')
771 > part.addparam('namespace', enc('phases'))
771 > part.addparam('namespace', enc('phases'))
772 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
772 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
773 > part.addparam('old', enc(str(0))) # successful update
773 > part.addparam('old', enc(str(0))) # successful update
774 > part.addparam('new', enc(str(0)))
774 > part.addparam('new', enc(str(0)))
775 > def fail(pushop, exc):
775 > def fail(pushop, exc):
776 > raise error.Abort('Correct phase push failed (because hooks)')
776 > raise error.Abort('Correct phase push failed (because hooks)')
777 > pushop.pkfailcb[part.id] = fail
777 > pushop.pkfailcb[part.id] = fail
778 > EOF
778 > EOF
779 $ cat >> $HGRCPATH << EOF
779 $ cat >> $HGRCPATH << EOF
780 > [hooks]
780 > [hooks]
781 > pretxnchangegroup=
781 > pretxnchangegroup=
782 > pretxnclose.failpush=
782 > pretxnclose.failpush=
783 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
783 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
784 > [extensions]
784 > [extensions]
785 > mandatorypart=$TESTTMP/mandatorypart.py
785 > mandatorypart=$TESTTMP/mandatorypart.py
786 > EOF
786 > EOF
787 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
787 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
788 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
788 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
789 $ cat other.pid >> $DAEMON_PIDS
789 $ cat other.pid >> $DAEMON_PIDS
790
790
791 (Failure from a hook)
791 (Failure from a hook)
792
792
793 $ hg -R main push other -r e7ec4e813ba6
793 $ hg -R main push other -r e7ec4e813ba6
794 pushing to other
794 pushing to other
795 searching for changes
795 searching for changes
796 adding changesets
796 adding changesets
797 adding manifests
797 adding manifests
798 adding file changes
798 adding file changes
799 added 1 changesets with 1 changes to 1 files
799 added 1 changesets with 1 changes to 1 files
800 do not push the key !
800 do not push the key !
801 pushkey-abort: prepushkey.failpush hook exited with status 1
801 pushkey-abort: prepushkey.failpush hook exited with status 1
802 transaction abort!
802 transaction abort!
803 Cleaning up the mess...
803 Cleaning up the mess...
804 rollback completed
804 rollback completed
805 abort: Correct phase push failed (because hooks)
805 abort: Correct phase push failed (because hooks)
806 [255]
806 [255]
807 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
807 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
808 pushing to ssh://user@dummy/other
808 pushing to ssh://user@dummy/other
809 searching for changes
809 searching for changes
810 remote: adding changesets
810 remote: adding changesets
811 remote: adding manifests
811 remote: adding manifests
812 remote: adding file changes
812 remote: adding file changes
813 remote: added 1 changesets with 1 changes to 1 files
813 remote: added 1 changesets with 1 changes to 1 files
814 remote: do not push the key !
814 remote: do not push the key !
815 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
815 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
816 remote: transaction abort!
816 remote: transaction abort!
817 remote: Cleaning up the mess...
817 remote: Cleaning up the mess...
818 remote: rollback completed
818 remote: rollback completed
819 abort: Correct phase push failed (because hooks)
819 abort: Correct phase push failed (because hooks)
820 [255]
820 [255]
821 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
821 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
822 pushing to http://localhost:$HGPORT2/
822 pushing to http://localhost:$HGPORT2/
823 searching for changes
823 searching for changes
824 remote: adding changesets
824 remote: adding changesets
825 remote: adding manifests
825 remote: adding manifests
826 remote: adding file changes
826 remote: adding file changes
827 remote: added 1 changesets with 1 changes to 1 files
827 remote: added 1 changesets with 1 changes to 1 files
828 remote: do not push the key !
828 remote: do not push the key !
829 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
829 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
830 remote: transaction abort!
830 remote: transaction abort!
831 remote: Cleaning up the mess...
831 remote: Cleaning up the mess...
832 remote: rollback completed
832 remote: rollback completed
833 abort: Correct phase push failed (because hooks)
833 abort: Correct phase push failed (because hooks)
834 [255]
834 [255]
835
835
836 (Failure from a the pushkey)
836 (Failure from a the pushkey)
837
837
838 $ cat > mandatorypart.py << EOF
838 $ cat > mandatorypart.py << EOF
839 > from mercurial import exchange
839 > from mercurial import exchange
840 > from mercurial import pushkey
840 > from mercurial import pushkey
841 > from mercurial import node
841 > from mercurial import node
842 > from mercurial import error
842 > from mercurial import error
843 > @exchange.b2partsgenerator('failingpuskey')
843 > @exchange.b2partsgenerator('failingpuskey')
844 > def addfailingpushey(pushop, bundler):
844 > def addfailingpushey(pushop, bundler):
845 > enc = pushkey.encode
845 > enc = pushkey.encode
846 > part = bundler.newpart('pushkey')
846 > part = bundler.newpart('pushkey')
847 > part.addparam('namespace', enc('phases'))
847 > part.addparam('namespace', enc('phases'))
848 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
848 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
849 > part.addparam('old', enc(str(4))) # will fail
849 > part.addparam('old', enc(str(4))) # will fail
850 > part.addparam('new', enc(str(3)))
850 > part.addparam('new', enc(str(3)))
851 > def fail(pushop, exc):
851 > def fail(pushop, exc):
852 > raise error.Abort('Clown phase push failed')
852 > raise error.Abort('Clown phase push failed')
853 > pushop.pkfailcb[part.id] = fail
853 > pushop.pkfailcb[part.id] = fail
854 > EOF
854 > EOF
855 $ cat >> $HGRCPATH << EOF
855 $ cat >> $HGRCPATH << EOF
856 > [hooks]
856 > [hooks]
857 > prepushkey.failpush =
857 > prepushkey.failpush =
858 > EOF
858 > EOF
859 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
859 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
860 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
860 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
861 $ cat other.pid >> $DAEMON_PIDS
861 $ cat other.pid >> $DAEMON_PIDS
862
862
863 $ hg -R main push other -r e7ec4e813ba6
863 $ hg -R main push other -r e7ec4e813ba6
864 pushing to other
864 pushing to other
865 searching for changes
865 searching for changes
866 adding changesets
866 adding changesets
867 adding manifests
867 adding manifests
868 adding file changes
868 adding file changes
869 added 1 changesets with 1 changes to 1 files
869 added 1 changesets with 1 changes to 1 files
870 transaction abort!
870 transaction abort!
871 Cleaning up the mess...
871 Cleaning up the mess...
872 rollback completed
872 rollback completed
873 pushkey: lock state after "phases"
873 pushkey: lock state after "phases"
874 lock: free
874 lock: free
875 wlock: free
875 wlock: free
876 abort: Clown phase push failed
876 abort: Clown phase push failed
877 [255]
877 [255]
878 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
878 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
879 pushing to ssh://user@dummy/other
879 pushing to ssh://user@dummy/other
880 searching for changes
880 searching for changes
881 remote: adding changesets
881 remote: adding changesets
882 remote: adding manifests
882 remote: adding manifests
883 remote: adding file changes
883 remote: adding file changes
884 remote: added 1 changesets with 1 changes to 1 files
884 remote: added 1 changesets with 1 changes to 1 files
885 remote: transaction abort!
885 remote: transaction abort!
886 remote: Cleaning up the mess...
886 remote: Cleaning up the mess...
887 remote: rollback completed
887 remote: rollback completed
888 remote: pushkey: lock state after "phases"
888 remote: pushkey: lock state after "phases"
889 remote: lock: free
889 remote: lock: free
890 remote: wlock: free
890 remote: wlock: free
891 abort: Clown phase push failed
891 abort: Clown phase push failed
892 [255]
892 [255]
893 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
893 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
894 pushing to http://localhost:$HGPORT2/
894 pushing to http://localhost:$HGPORT2/
895 searching for changes
895 searching for changes
896 remote: adding changesets
896 remote: adding changesets
897 remote: adding manifests
897 remote: adding manifests
898 remote: adding file changes
898 remote: adding file changes
899 remote: added 1 changesets with 1 changes to 1 files
899 remote: added 1 changesets with 1 changes to 1 files
900 remote: transaction abort!
900 remote: transaction abort!
901 remote: Cleaning up the mess...
901 remote: Cleaning up the mess...
902 remote: rollback completed
902 remote: rollback completed
903 remote: pushkey: lock state after "phases"
903 remote: pushkey: lock state after "phases"
904 remote: lock: free
904 remote: lock: free
905 remote: wlock: free
905 remote: wlock: free
906 abort: Clown phase push failed
906 abort: Clown phase push failed
907 [255]
907 [255]
908
908
909 Test lazily acquiring the lock during unbundle
909 Test lazily acquiring the lock during unbundle
910 $ cp $TESTTMP/hgrc.orig $HGRCPATH
910 $ cp $TESTTMP/hgrc.orig $HGRCPATH
911 $ cat >> $HGRCPATH <<EOF
911 $ cat >> $HGRCPATH <<EOF
912 > [ui]
912 > [ui]
913 > ssh=python "$TESTDIR/dummyssh"
913 > ssh=python "$TESTDIR/dummyssh"
914 > EOF
914 > EOF
915
915
916 $ cat >> $TESTTMP/locktester.py <<EOF
916 $ cat >> $TESTTMP/locktester.py <<EOF
917 > import os
917 > import os
918 > from mercurial import extensions, bundle2, util
918 > from mercurial import extensions, bundle2, util
919 > def checklock(orig, repo, *args, **kwargs):
919 > def checklock(orig, repo, *args, **kwargs):
920 > if repo.svfs.lexists("lock"):
920 > if repo.svfs.lexists("lock"):
921 > raise util.Abort("Lock should not be taken")
921 > raise util.Abort("Lock should not be taken")
922 > return orig(repo, *args, **kwargs)
922 > return orig(repo, *args, **kwargs)
923 > def extsetup(ui):
923 > def extsetup(ui):
924 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
924 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
925 > EOF
925 > EOF
926
926
927 $ hg init lazylock
927 $ hg init lazylock
928 $ cat >> lazylock/.hg/hgrc <<EOF
928 $ cat >> lazylock/.hg/hgrc <<EOF
929 > [extensions]
929 > [extensions]
930 > locktester=$TESTTMP/locktester.py
930 > locktester=$TESTTMP/locktester.py
931 > EOF
931 > EOF
932
932
933 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
933 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
934 $ cd lazylockclient
934 $ cd lazylockclient
935 $ touch a && hg ci -Aqm a
935 $ touch a && hg ci -Aqm a
936 $ hg push
936 $ hg push
937 pushing to ssh://user@dummy/lazylock
937 pushing to ssh://user@dummy/lazylock
938 searching for changes
938 searching for changes
939 remote: Lock should not be taken
939 remote: Lock should not be taken
940 abort: push failed on remote
940 abort: push failed on remote
941 [255]
941 [255]
942
942
943 $ cat >> ../lazylock/.hg/hgrc <<EOF
943 $ cat >> ../lazylock/.hg/hgrc <<EOF
944 > [experimental]
944 > [experimental]
945 > bundle2lazylocking=True
945 > bundle2lazylocking=True
946 > EOF
946 > EOF
947 $ hg push
947 $ hg push
948 pushing to ssh://user@dummy/lazylock
948 pushing to ssh://user@dummy/lazylock
949 searching for changes
949 searching for changes
950 remote: adding changesets
950 remote: adding changesets
951 remote: adding manifests
951 remote: adding manifests
952 remote: adding file changes
952 remote: adding file changes
953 remote: added 1 changesets with 1 changes to 1 files
953 remote: added 1 changesets with 1 changes to 1 files
954
954
955 $ cd ..
955 $ cd ..
956
956
957 Servers can disable bundle1 for clone/pull operations
957 Servers can disable bundle1 for clone/pull operations
958
958
959 $ killdaemons.py
959 $ killdaemons.py
960 $ hg init bundle2onlyserver
960 $ hg init bundle2onlyserver
961 $ cd bundle2onlyserver
961 $ cd bundle2onlyserver
962 $ cat > .hg/hgrc << EOF
962 $ cat > .hg/hgrc << EOF
963 > [server]
963 > [server]
964 > bundle1.pull = false
964 > bundle1.pull = false
965 > EOF
965 > EOF
966
966
967 $ touch foo
967 $ touch foo
968 $ hg -q commit -A -m initial
968 $ hg -q commit -A -m initial
969
969
970 $ hg serve -p $HGPORT -d --pid-file=hg.pid
970 $ hg serve -p $HGPORT -d --pid-file=hg.pid
971 $ cat hg.pid >> $DAEMON_PIDS
971 $ cat hg.pid >> $DAEMON_PIDS
972
972
973 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
973 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
974 requesting all changes
974 requesting all changes
975 abort: remote error:
975 abort: remote error:
976 incompatible Mercurial client; bundle2 required
976 incompatible Mercurial client; bundle2 required
977 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
977 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
978 [255]
978 [255]
979 $ killdaemons.py
979 $ killdaemons.py
980 $ cd ..
980 $ cd ..
981
981
982 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
982 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
983
983
984 $ hg --config format.usegeneraldelta=false init notgdserver
984 $ hg --config format.usegeneraldelta=false init notgdserver
985 $ cd notgdserver
985 $ cd notgdserver
986 $ cat > .hg/hgrc << EOF
986 $ cat > .hg/hgrc << EOF
987 > [server]
987 > [server]
988 > bundle1gd.pull = false
988 > bundle1gd.pull = false
989 > EOF
989 > EOF
990
990
991 $ touch foo
991 $ touch foo
992 $ hg -q commit -A -m initial
992 $ hg -q commit -A -m initial
993 $ hg serve -p $HGPORT -d --pid-file=hg.pid
993 $ hg serve -p $HGPORT -d --pid-file=hg.pid
994 $ cat hg.pid >> $DAEMON_PIDS
994 $ cat hg.pid >> $DAEMON_PIDS
995
995
996 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-1
996 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-1
997 requesting all changes
997 requesting all changes
998 adding changesets
998 adding changesets
999 adding manifests
999 adding manifests
1000 adding file changes
1000 adding file changes
1001 added 1 changesets with 1 changes to 1 files
1001 added 1 changesets with 1 changes to 1 files
1002 updating to branch default
1002 updating to branch default
1003 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1003 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1004
1004
1005 $ killdaemons.py
1005 $ killdaemons.py
1006 $ cd ../bundle2onlyserver
1006 $ cd ../bundle2onlyserver
1007
1007
1008 bundle1 pull can be disabled for generaldelta repos only
1008 bundle1 pull can be disabled for generaldelta repos only
1009
1009
1010 $ cat > .hg/hgrc << EOF
1010 $ cat > .hg/hgrc << EOF
1011 > [server]
1011 > [server]
1012 > bundle1gd.pull = false
1012 > bundle1gd.pull = false
1013 > EOF
1013 > EOF
1014
1014
1015 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1015 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1016 $ cat hg.pid >> $DAEMON_PIDS
1016 $ cat hg.pid >> $DAEMON_PIDS
1017 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
1017 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
1018 requesting all changes
1018 requesting all changes
1019 abort: remote error:
1019 abort: remote error:
1020 incompatible Mercurial client; bundle2 required
1020 incompatible Mercurial client; bundle2 required
1021 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1021 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1022 [255]
1022 [255]
1023
1023
1024 $ killdaemons.py
1024 $ killdaemons.py
1025
1025
1026 Verify the global server.bundle1 option works
1026 Verify the global server.bundle1 option works
1027
1027
1028 $ cat > .hg/hgrc << EOF
1028 $ cat > .hg/hgrc << EOF
1029 > [server]
1029 > [server]
1030 > bundle1 = false
1030 > bundle1 = false
1031 > EOF
1031 > EOF
1032 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1032 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1033 $ cat hg.pid >> $DAEMON_PIDS
1033 $ cat hg.pid >> $DAEMON_PIDS
1034 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT not-bundle2
1034 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT not-bundle2
1035 requesting all changes
1035 requesting all changes
1036 abort: remote error:
1036 abort: remote error:
1037 incompatible Mercurial client; bundle2 required
1037 incompatible Mercurial client; bundle2 required
1038 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1038 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1039 [255]
1039 [255]
1040 $ killdaemons.py
1040 $ killdaemons.py
1041
1041
1042 $ cat > .hg/hgrc << EOF
1042 $ cat > .hg/hgrc << EOF
1043 > [server]
1043 > [server]
1044 > bundle1gd = false
1044 > bundle1gd = false
1045 > EOF
1045 > EOF
1046 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1046 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1047 $ cat hg.pid >> $DAEMON_PIDS
1047 $ cat hg.pid >> $DAEMON_PIDS
1048
1048
1049 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
1049 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2
1050 requesting all changes
1050 requesting all changes
1051 abort: remote error:
1051 abort: remote error:
1052 incompatible Mercurial client; bundle2 required
1052 incompatible Mercurial client; bundle2 required
1053 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1053 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1054 [255]
1054 [255]
1055
1055
1056 $ killdaemons.py
1056 $ killdaemons.py
1057
1057
1058 $ cd ../notgdserver
1058 $ cd ../notgdserver
1059 $ cat > .hg/hgrc << EOF
1059 $ cat > .hg/hgrc << EOF
1060 > [server]
1060 > [server]
1061 > bundle1gd = false
1061 > bundle1gd = false
1062 > EOF
1062 > EOF
1063 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1063 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1064 $ cat hg.pid >> $DAEMON_PIDS
1064 $ cat hg.pid >> $DAEMON_PIDS
1065
1065
1066 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-2
1066 $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-2
1067 requesting all changes
1067 requesting all changes
1068 adding changesets
1068 adding changesets
1069 adding manifests
1069 adding manifests
1070 adding file changes
1070 adding file changes
1071 added 1 changesets with 1 changes to 1 files
1071 added 1 changesets with 1 changes to 1 files
1072 updating to branch default
1072 updating to branch default
1073 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1073 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1074
1074
1075 $ killdaemons.py
1075 $ killdaemons.py
1076 $ cd ../bundle2onlyserver
1076 $ cd ../bundle2onlyserver
1077
1077
1078 Verify bundle1 pushes can be disabled
1078 Verify bundle1 pushes can be disabled
1079
1079
1080 $ cat > .hg/hgrc << EOF
1080 $ cat > .hg/hgrc << EOF
1081 > [server]
1081 > [server]
1082 > bundle1.push = false
1082 > bundle1.push = false
1083 > [web]
1083 > [web]
1084 > allow_push = *
1084 > allow_push = *
1085 > push_ssl = false
1085 > push_ssl = false
1086 > EOF
1086 > EOF
1087
1087
1088 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1088 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1089 $ cat hg.pid >> $DAEMON_PIDS
1089 $ cat hg.pid >> $DAEMON_PIDS
1090 $ cd ..
1090 $ cd ..
1091
1091
1092 $ hg clone http://localhost:$HGPORT bundle2-only
1092 $ hg clone http://localhost:$HGPORT bundle2-only
1093 requesting all changes
1093 requesting all changes
1094 adding changesets
1094 adding changesets
1095 adding manifests
1095 adding manifests
1096 adding file changes
1096 adding file changes
1097 added 1 changesets with 1 changes to 1 files
1097 added 1 changesets with 1 changes to 1 files
1098 updating to branch default
1098 updating to branch default
1099 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1099 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1100 $ cd bundle2-only
1100 $ cd bundle2-only
1101 $ echo commit > foo
1101 $ echo commit > foo
1102 $ hg commit -m commit
1102 $ hg commit -m commit
1103 $ hg --config experimental.bundle2-exp=false push
1103 $ hg --config experimental.bundle2-exp=false push
1104 pushing to http://localhost:$HGPORT/
1104 pushing to http://localhost:$HGPORT/
1105 searching for changes
1105 searching for changes
1106 abort: remote error:
1106 abort: remote error:
1107 incompatible Mercurial client; bundle2 required
1107 incompatible Mercurial client; bundle2 required
1108 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1108 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1109 [255]
1109 [255]
1110
1110
1111 $ hg push
1111 $ hg push
1112 pushing to http://localhost:$HGPORT/
1112 pushing to http://localhost:$HGPORT/
1113 searching for changes
1113 searching for changes
1114 remote: adding changesets
1114 remote: adding changesets
1115 remote: adding manifests
1115 remote: adding manifests
1116 remote: adding file changes
1116 remote: adding file changes
1117 remote: added 1 changesets with 1 changes to 1 files
1117 remote: added 1 changesets with 1 changes to 1 files
@@ -1,875 +1,875 b''
1 commit hooks can see env vars
1 commit hooks can see env vars
2 (and post-transaction one are run unlocked)
2 (and post-transaction one are run unlocked)
3
3
4 $ cat << EOF >> $HGRCPATH
4 $ cat << EOF >> $HGRCPATH
5 > [experimental]
5 > [experimental]
6 > # drop me once bundle2 is the default,
6 > # drop me once bundle2 is the default,
7 > # added to get test change early.
7 > # added to get test change early.
8 > bundle2-exp = True
8 > bundle2-exp = True
9 > EOF
9 > EOF
10
10
11 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
11 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
12 > def showargs(ui, repo, hooktype, **kwargs):
12 > def showargs(ui, repo, hooktype, **kwargs):
13 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
13 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
14 > EOF
14 > EOF
15
15
16 $ hg init a
16 $ hg init a
17 $ cd a
17 $ cd a
18 $ cat > .hg/hgrc <<EOF
18 $ cat > .hg/hgrc <<EOF
19 > [hooks]
19 > [hooks]
20 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
20 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
21 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
21 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
22 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
22 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
23 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
23 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
24 > pretxncommit.tip = hg -q tip
24 > pretxncommit.tip = hg -q tip
25 > pre-identify = printenv.py pre-identify 1
25 > pre-identify = printenv.py pre-identify 1
26 > pre-cat = printenv.py pre-cat
26 > pre-cat = printenv.py pre-cat
27 > post-cat = printenv.py post-cat
27 > post-cat = printenv.py post-cat
28 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
28 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
29 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
29 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
30 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
30 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
31 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
31 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
32 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
32 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
33 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
33 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
34 > EOF
34 > EOF
35 $ echo a > a
35 $ echo a > a
36 $ hg add a
36 $ hg add a
37 $ hg commit -m a
37 $ hg commit -m a
38 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
38 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
39 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
39 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
40 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
40 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
41 0:cb9a9f314b8b
41 0:cb9a9f314b8b
42 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
42 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
43 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
43 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
44 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
44 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
45 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
45 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
46
46
47 $ hg clone . ../b
47 $ hg clone . ../b
48 updating to branch default
48 updating to branch default
49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 $ cd ../b
50 $ cd ../b
51
51
52 changegroup hooks can see env vars
52 changegroup hooks can see env vars
53
53
54 $ cat > .hg/hgrc <<EOF
54 $ cat > .hg/hgrc <<EOF
55 > [hooks]
55 > [hooks]
56 > prechangegroup = printenv.py prechangegroup
56 > prechangegroup = printenv.py prechangegroup
57 > changegroup = printenv.py changegroup
57 > changegroup = printenv.py changegroup
58 > incoming = printenv.py incoming
58 > incoming = printenv.py incoming
59 > EOF
59 > EOF
60
60
61 pretxncommit and commit hooks can see both parents of merge
61 pretxncommit and commit hooks can see both parents of merge
62
62
63 $ cd ../a
63 $ cd ../a
64 $ echo b >> a
64 $ echo b >> a
65 $ hg commit -m a1 -d "1 0"
65 $ hg commit -m a1 -d "1 0"
66 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
66 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
67 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
68 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
68 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
69 1:ab228980c14d
69 1:ab228980c14d
70 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
70 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
71 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
71 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
72 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
72 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
74 $ hg update -C 0
74 $ hg update -C 0
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 $ echo b > b
76 $ echo b > b
77 $ hg add b
77 $ hg add b
78 $ hg commit -m b -d '1 0'
78 $ hg commit -m b -d '1 0'
79 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
79 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
80 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
80 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
81 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
81 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
82 2:ee9deb46ab31
82 2:ee9deb46ab31
83 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
83 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
84 created new head
84 created new head
85 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
85 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
86 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
86 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
87 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
87 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
88 $ hg merge 1
88 $ hg merge 1
89 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
89 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 (branch merge, don't forget to commit)
90 (branch merge, don't forget to commit)
91 $ hg commit -m merge -d '2 0'
91 $ hg commit -m merge -d '2 0'
92 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
92 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
93 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
93 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
94 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
94 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
95 3:07f3376c1e65
95 3:07f3376c1e65
96 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
96 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
97 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
97 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
98 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
98 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
99 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
99 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
100
100
101 test generic hooks
101 test generic hooks
102
102
103 $ hg id
103 $ hg id
104 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
104 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
105 abort: pre-identify hook exited with status 1
105 abort: pre-identify hook exited with status 1
106 [255]
106 [255]
107 $ hg cat b
107 $ hg cat b
108 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
108 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
109 b
109 b
110 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
110 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
111
111
112 $ cd ../b
112 $ cd ../b
113 $ hg pull ../a
113 $ hg pull ../a
114 pulling from ../a
114 pulling from ../a
115 searching for changes
115 searching for changes
116 prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
116 prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
117 adding changesets
117 adding changesets
118 adding manifests
118 adding manifests
119 adding file changes
119 adding file changes
120 added 3 changesets with 2 changes to 2 files
120 added 3 changesets with 2 changes to 2 files
121 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
121 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
122 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
122 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
123 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
123 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
124 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
124 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
125 (run 'hg update' to get a working copy)
125 (run 'hg update' to get a working copy)
126
126
127 tag hooks can see env vars
127 tag hooks can see env vars
128
128
129 $ cd ../a
129 $ cd ../a
130 $ cat >> .hg/hgrc <<EOF
130 $ cat >> .hg/hgrc <<EOF
131 > pretag = printenv.py pretag
131 > pretag = printenv.py pretag
132 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
132 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
133 > EOF
133 > EOF
134 $ hg tag -d '3 0' a
134 $ hg tag -d '3 0' a
135 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
135 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
136 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
136 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
137 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
137 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
138 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
138 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
139 4:539e4b31b6dc
139 4:539e4b31b6dc
140 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
140 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
141 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
141 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
142 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
142 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
143 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
143 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
144 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
144 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
145 $ hg tag -l la
145 $ hg tag -l la
146 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
146 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
147 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
147 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
148
148
149 pretag hook can forbid tagging
149 pretag hook can forbid tagging
150
150
151 $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc
151 $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc
152 $ hg tag -d '4 0' fa
152 $ hg tag -d '4 0' fa
153 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
153 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
154 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
154 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
155 abort: pretag.forbid hook exited with status 1
155 abort: pretag.forbid hook exited with status 1
156 [255]
156 [255]
157 $ hg tag -l fla
157 $ hg tag -l fla
158 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
158 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
159 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
159 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
160 abort: pretag.forbid hook exited with status 1
160 abort: pretag.forbid hook exited with status 1
161 [255]
161 [255]
162
162
163 pretxncommit hook can see changeset, can roll back txn, changeset no
163 pretxncommit hook can see changeset, can roll back txn, changeset no
164 more there after
164 more there after
165
165
166 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
166 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
167 $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc
167 $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc
168 $ echo z > z
168 $ echo z > z
169 $ hg add z
169 $ hg add z
170 $ hg -q tip
170 $ hg -q tip
171 4:539e4b31b6dc
171 4:539e4b31b6dc
172 $ hg commit -m 'fail' -d '4 0'
172 $ hg commit -m 'fail' -d '4 0'
173 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
173 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
174 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
174 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
175 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
175 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
176 5:6f611f8018c1
176 5:6f611f8018c1
177 5:6f611f8018c1
177 5:6f611f8018c1
178 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
178 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
179 transaction abort!
179 transaction abort!
180 txnabort python hook: txnid,txnname
180 txnabort python hook: txnid,txnname
181 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
181 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
182 rollback completed
182 rollback completed
183 abort: pretxncommit.forbid1 hook exited with status 1
183 abort: pretxncommit.forbid1 hook exited with status 1
184 [255]
184 [255]
185 $ hg -q tip
185 $ hg -q tip
186 4:539e4b31b6dc
186 4:539e4b31b6dc
187
187
188 (Check that no 'changelog.i.a' file were left behind)
188 (Check that no 'changelog.i.a' file were left behind)
189
189
190 $ ls -1 .hg/store/
190 $ ls -1 .hg/store/
191 00changelog.i
191 00changelog.i
192 00manifest.i
192 00manifest.i
193 data
193 data
194 fncache
194 fncache
195 journal.phaseroots
195 journal.phaseroots
196 phaseroots
196 phaseroots
197 undo
197 undo
198 undo.backup.fncache
198 undo.backup.fncache
199 undo.backupfiles
199 undo.backupfiles
200 undo.phaseroots
200 undo.phaseroots
201
201
202
202
203 precommit hook can prevent commit
203 precommit hook can prevent commit
204
204
205 $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc
205 $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc
206 $ hg commit -m 'fail' -d '4 0'
206 $ hg commit -m 'fail' -d '4 0'
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
209 abort: precommit.forbid hook exited with status 1
209 abort: precommit.forbid hook exited with status 1
210 [255]
210 [255]
211 $ hg -q tip
211 $ hg -q tip
212 4:539e4b31b6dc
212 4:539e4b31b6dc
213
213
214 preupdate hook can prevent update
214 preupdate hook can prevent update
215
215
216 $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc
216 $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc
217 $ hg update 1
217 $ hg update 1
218 preupdate hook: HG_PARENT1=ab228980c14d
218 preupdate hook: HG_PARENT1=ab228980c14d
219 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
219 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
220
220
221 update hook
221 update hook
222
222
223 $ echo "update = printenv.py update" >> .hg/hgrc
223 $ echo "update = printenv.py update" >> .hg/hgrc
224 $ hg update
224 $ hg update
225 preupdate hook: HG_PARENT1=539e4b31b6dc
225 preupdate hook: HG_PARENT1=539e4b31b6dc
226 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
226 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
228
228
229 pushkey hook
229 pushkey hook
230
230
231 $ echo "pushkey = printenv.py pushkey" >> .hg/hgrc
231 $ echo "pushkey = printenv.py pushkey" >> .hg/hgrc
232 $ cd ../b
232 $ cd ../b
233 $ hg bookmark -r null foo
233 $ hg bookmark -r null foo
234 $ hg push -B foo ../a
234 $ hg push -B foo ../a
235 pushing to ../a
235 pushing to ../a
236 searching for changes
236 searching for changes
237 no changes found
237 no changes found
238 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
238 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
239 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
239 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
240 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
240 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
241 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
241 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/a (glob)
242 exporting bookmark foo
242 exporting bookmark foo
243 [1]
243 [1]
244 $ cd ../a
244 $ cd ../a
245
245
246 listkeys hook
246 listkeys hook
247
247
248 $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc
248 $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc
249 $ hg bookmark -r null bar
249 $ hg bookmark -r null bar
250 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
250 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
251 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
251 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
252 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
252 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
253 $ cd ../b
253 $ cd ../b
254 $ hg pull -B bar ../a
254 $ hg pull -B bar ../a
255 pulling from ../a
255 pulling from ../a
256 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
256 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
257 no changes found
257 no changes found
258 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
258 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
259 adding remote bookmark bar
259 adding remote bookmark bar
260 $ cd ../a
260 $ cd ../a
261
261
262 test that prepushkey can prevent incoming keys
262 test that prepushkey can prevent incoming keys
263
263
264 $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc
264 $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc
265 $ cd ../b
265 $ cd ../b
266 $ hg bookmark -r null baz
266 $ hg bookmark -r null baz
267 $ hg push -B baz ../a
267 $ hg push -B baz ../a
268 pushing to ../a
268 pushing to ../a
269 searching for changes
269 searching for changes
270 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
270 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
271 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
271 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
272 no changes found
272 no changes found
273 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
273 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
274 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=push (glob)
274 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
275 pushkey-abort: prepushkey hook exited with status 1
275 pushkey-abort: prepushkey hook exited with status 1
276 abort: exporting bookmark baz failed!
276 abort: exporting bookmark baz failed!
277 [255]
277 [255]
278 $ cd ../a
278 $ cd ../a
279
279
280 test that prelistkeys can prevent listing keys
280 test that prelistkeys can prevent listing keys
281
281
282 $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc
282 $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc
283 $ hg bookmark -r null quux
283 $ hg bookmark -r null quux
284 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
284 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
285 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
285 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
286 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
286 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
287 $ cd ../b
287 $ cd ../b
288 $ hg pull -B quux ../a
288 $ hg pull -B quux ../a
289 pulling from ../a
289 pulling from ../a
290 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
290 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
291 abort: prelistkeys hook exited with status 1
291 abort: prelistkeys hook exited with status 1
292 [255]
292 [255]
293 $ cd ../a
293 $ cd ../a
294 $ rm .hg/hgrc
294 $ rm .hg/hgrc
295
295
296 prechangegroup hook can prevent incoming changes
296 prechangegroup hook can prevent incoming changes
297
297
298 $ cd ../b
298 $ cd ../b
299 $ hg -q tip
299 $ hg -q tip
300 3:07f3376c1e65
300 3:07f3376c1e65
301 $ cat > .hg/hgrc <<EOF
301 $ cat > .hg/hgrc <<EOF
302 > [hooks]
302 > [hooks]
303 > prechangegroup.forbid = printenv.py prechangegroup.forbid 1
303 > prechangegroup.forbid = printenv.py prechangegroup.forbid 1
304 > EOF
304 > EOF
305 $ hg pull ../a
305 $ hg pull ../a
306 pulling from ../a
306 pulling from ../a
307 searching for changes
307 searching for changes
308 prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
308 prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
309 abort: prechangegroup.forbid hook exited with status 1
309 abort: prechangegroup.forbid hook exited with status 1
310 [255]
310 [255]
311
311
312 pretxnchangegroup hook can see incoming changes, can roll back txn,
312 pretxnchangegroup hook can see incoming changes, can roll back txn,
313 incoming changes no longer there after
313 incoming changes no longer there after
314
314
315 $ cat > .hg/hgrc <<EOF
315 $ cat > .hg/hgrc <<EOF
316 > [hooks]
316 > [hooks]
317 > pretxnchangegroup.forbid0 = hg tip -q
317 > pretxnchangegroup.forbid0 = hg tip -q
318 > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1
318 > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1
319 > EOF
319 > EOF
320 $ hg pull ../a
320 $ hg pull ../a
321 pulling from ../a
321 pulling from ../a
322 searching for changes
322 searching for changes
323 adding changesets
323 adding changesets
324 adding manifests
324 adding manifests
325 adding file changes
325 adding file changes
326 added 1 changesets with 1 changes to 1 files
326 added 1 changesets with 1 changes to 1 files
327 4:539e4b31b6dc
327 4:539e4b31b6dc
328 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
328 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
329 transaction abort!
329 transaction abort!
330 rollback completed
330 rollback completed
331 abort: pretxnchangegroup.forbid1 hook exited with status 1
331 abort: pretxnchangegroup.forbid1 hook exited with status 1
332 [255]
332 [255]
333 $ hg -q tip
333 $ hg -q tip
334 3:07f3376c1e65
334 3:07f3376c1e65
335
335
336 outgoing hooks can see env vars
336 outgoing hooks can see env vars
337
337
338 $ rm .hg/hgrc
338 $ rm .hg/hgrc
339 $ cat > ../a/.hg/hgrc <<EOF
339 $ cat > ../a/.hg/hgrc <<EOF
340 > [hooks]
340 > [hooks]
341 > preoutgoing = printenv.py preoutgoing
341 > preoutgoing = printenv.py preoutgoing
342 > outgoing = printenv.py outgoing
342 > outgoing = printenv.py outgoing
343 > EOF
343 > EOF
344 $ hg pull ../a
344 $ hg pull ../a
345 pulling from ../a
345 pulling from ../a
346 searching for changes
346 searching for changes
347 preoutgoing hook: HG_SOURCE=pull
347 preoutgoing hook: HG_SOURCE=pull
348 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
348 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
349 adding changesets
349 adding changesets
350 adding manifests
350 adding manifests
351 adding file changes
351 adding file changes
352 added 1 changesets with 1 changes to 1 files
352 added 1 changesets with 1 changes to 1 files
353 adding remote bookmark quux
353 adding remote bookmark quux
354 (run 'hg update' to get a working copy)
354 (run 'hg update' to get a working copy)
355 $ hg rollback
355 $ hg rollback
356 repository tip rolled back to revision 3 (undo pull)
356 repository tip rolled back to revision 3 (undo pull)
357
357
358 preoutgoing hook can prevent outgoing changes
358 preoutgoing hook can prevent outgoing changes
359
359
360 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc
360 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc
361 $ hg pull ../a
361 $ hg pull ../a
362 pulling from ../a
362 pulling from ../a
363 searching for changes
363 searching for changes
364 preoutgoing hook: HG_SOURCE=pull
364 preoutgoing hook: HG_SOURCE=pull
365 preoutgoing.forbid hook: HG_SOURCE=pull
365 preoutgoing.forbid hook: HG_SOURCE=pull
366 abort: preoutgoing.forbid hook exited with status 1
366 abort: preoutgoing.forbid hook exited with status 1
367 [255]
367 [255]
368
368
369 outgoing hooks work for local clones
369 outgoing hooks work for local clones
370
370
371 $ cd ..
371 $ cd ..
372 $ cat > a/.hg/hgrc <<EOF
372 $ cat > a/.hg/hgrc <<EOF
373 > [hooks]
373 > [hooks]
374 > preoutgoing = printenv.py preoutgoing
374 > preoutgoing = printenv.py preoutgoing
375 > outgoing = printenv.py outgoing
375 > outgoing = printenv.py outgoing
376 > EOF
376 > EOF
377 $ hg clone a c
377 $ hg clone a c
378 preoutgoing hook: HG_SOURCE=clone
378 preoutgoing hook: HG_SOURCE=clone
379 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
379 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
380 updating to branch default
380 updating to branch default
381 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
381 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
382 $ rm -rf c
382 $ rm -rf c
383
383
384 preoutgoing hook can prevent outgoing changes for local clones
384 preoutgoing hook can prevent outgoing changes for local clones
385
385
386 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc
386 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc
387 $ hg clone a zzz
387 $ hg clone a zzz
388 preoutgoing hook: HG_SOURCE=clone
388 preoutgoing hook: HG_SOURCE=clone
389 preoutgoing.forbid hook: HG_SOURCE=clone
389 preoutgoing.forbid hook: HG_SOURCE=clone
390 abort: preoutgoing.forbid hook exited with status 1
390 abort: preoutgoing.forbid hook exited with status 1
391 [255]
391 [255]
392
392
393 $ cd "$TESTTMP/b"
393 $ cd "$TESTTMP/b"
394
394
395 $ cat > hooktests.py <<EOF
395 $ cat > hooktests.py <<EOF
396 > from mercurial import error
396 > from mercurial import error
397 >
397 >
398 > uncallable = 0
398 > uncallable = 0
399 >
399 >
400 > def printargs(args):
400 > def printargs(args):
401 > args.pop('ui', None)
401 > args.pop('ui', None)
402 > args.pop('repo', None)
402 > args.pop('repo', None)
403 > a = list(args.items())
403 > a = list(args.items())
404 > a.sort()
404 > a.sort()
405 > print 'hook args:'
405 > print 'hook args:'
406 > for k, v in a:
406 > for k, v in a:
407 > print ' ', k, v
407 > print ' ', k, v
408 >
408 >
409 > def passhook(**args):
409 > def passhook(**args):
410 > printargs(args)
410 > printargs(args)
411 >
411 >
412 > def failhook(**args):
412 > def failhook(**args):
413 > printargs(args)
413 > printargs(args)
414 > return True
414 > return True
415 >
415 >
416 > class LocalException(Exception):
416 > class LocalException(Exception):
417 > pass
417 > pass
418 >
418 >
419 > def raisehook(**args):
419 > def raisehook(**args):
420 > raise LocalException('exception from hook')
420 > raise LocalException('exception from hook')
421 >
421 >
422 > def aborthook(**args):
422 > def aborthook(**args):
423 > raise error.Abort('raise abort from hook')
423 > raise error.Abort('raise abort from hook')
424 >
424 >
425 > def brokenhook(**args):
425 > def brokenhook(**args):
426 > return 1 + {}
426 > return 1 + {}
427 >
427 >
428 > def verbosehook(ui, **args):
428 > def verbosehook(ui, **args):
429 > ui.note('verbose output from hook\n')
429 > ui.note('verbose output from hook\n')
430 >
430 >
431 > def printtags(ui, repo, **args):
431 > def printtags(ui, repo, **args):
432 > print sorted(repo.tags())
432 > print sorted(repo.tags())
433 >
433 >
434 > class container:
434 > class container:
435 > unreachable = 1
435 > unreachable = 1
436 > EOF
436 > EOF
437
437
438 $ cat > syntaxerror.py << EOF
438 $ cat > syntaxerror.py << EOF
439 > (foo
439 > (foo
440 > EOF
440 > EOF
441
441
442 test python hooks
442 test python hooks
443
443
444 #if windows
444 #if windows
445 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
445 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
446 #else
446 #else
447 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
447 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
448 #endif
448 #endif
449 $ export PYTHONPATH
449 $ export PYTHONPATH
450
450
451 $ echo '[hooks]' > ../a/.hg/hgrc
451 $ echo '[hooks]' > ../a/.hg/hgrc
452 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
452 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
453 $ hg pull ../a 2>&1 | grep 'raised an exception'
453 $ hg pull ../a 2>&1 | grep 'raised an exception'
454 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
454 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
455
455
456 $ echo '[hooks]' > ../a/.hg/hgrc
456 $ echo '[hooks]' > ../a/.hg/hgrc
457 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
457 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
458 $ hg pull ../a 2>&1 | grep 'raised an exception'
458 $ hg pull ../a 2>&1 | grep 'raised an exception'
459 error: preoutgoing.raise hook raised an exception: exception from hook
459 error: preoutgoing.raise hook raised an exception: exception from hook
460
460
461 $ echo '[hooks]' > ../a/.hg/hgrc
461 $ echo '[hooks]' > ../a/.hg/hgrc
462 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
462 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
463 $ hg pull ../a
463 $ hg pull ../a
464 pulling from ../a
464 pulling from ../a
465 searching for changes
465 searching for changes
466 error: preoutgoing.abort hook failed: raise abort from hook
466 error: preoutgoing.abort hook failed: raise abort from hook
467 abort: raise abort from hook
467 abort: raise abort from hook
468 [255]
468 [255]
469
469
470 $ echo '[hooks]' > ../a/.hg/hgrc
470 $ echo '[hooks]' > ../a/.hg/hgrc
471 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
471 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
472 $ hg pull ../a
472 $ hg pull ../a
473 pulling from ../a
473 pulling from ../a
474 searching for changes
474 searching for changes
475 hook args:
475 hook args:
476 hooktype preoutgoing
476 hooktype preoutgoing
477 source pull
477 source pull
478 abort: preoutgoing.fail hook failed
478 abort: preoutgoing.fail hook failed
479 [255]
479 [255]
480
480
481 $ echo '[hooks]' > ../a/.hg/hgrc
481 $ echo '[hooks]' > ../a/.hg/hgrc
482 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
482 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
483 $ hg pull ../a
483 $ hg pull ../a
484 pulling from ../a
484 pulling from ../a
485 searching for changes
485 searching for changes
486 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
486 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
487 [255]
487 [255]
488
488
489 $ echo '[hooks]' > ../a/.hg/hgrc
489 $ echo '[hooks]' > ../a/.hg/hgrc
490 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
490 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
491 $ hg pull ../a
491 $ hg pull ../a
492 pulling from ../a
492 pulling from ../a
493 searching for changes
493 searching for changes
494 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
494 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
495 [255]
495 [255]
496
496
497 $ echo '[hooks]' > ../a/.hg/hgrc
497 $ echo '[hooks]' > ../a/.hg/hgrc
498 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
498 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
499 $ hg pull ../a
499 $ hg pull ../a
500 pulling from ../a
500 pulling from ../a
501 searching for changes
501 searching for changes
502 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
502 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
503 [255]
503 [255]
504
504
505 $ echo '[hooks]' > ../a/.hg/hgrc
505 $ echo '[hooks]' > ../a/.hg/hgrc
506 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
506 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
507 $ hg pull ../a
507 $ hg pull ../a
508 pulling from ../a
508 pulling from ../a
509 searching for changes
509 searching for changes
510 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
510 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
511 (run with --traceback for stack trace)
511 (run with --traceback for stack trace)
512 [255]
512 [255]
513
513
514 $ echo '[hooks]' > ../a/.hg/hgrc
514 $ echo '[hooks]' > ../a/.hg/hgrc
515 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
515 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
516 $ hg pull ../a
516 $ hg pull ../a
517 pulling from ../a
517 pulling from ../a
518 searching for changes
518 searching for changes
519 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
519 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
520 (run with --traceback for stack trace)
520 (run with --traceback for stack trace)
521 [255]
521 [255]
522
522
523 $ echo '[hooks]' > ../a/.hg/hgrc
523 $ echo '[hooks]' > ../a/.hg/hgrc
524 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
524 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
525 $ hg pull ../a
525 $ hg pull ../a
526 pulling from ../a
526 pulling from ../a
527 searching for changes
527 searching for changes
528 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
528 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
529 (run with --traceback for stack trace)
529 (run with --traceback for stack trace)
530 [255]
530 [255]
531
531
532 The second egrep is to filter out lines like ' ^', which are slightly
532 The second egrep is to filter out lines like ' ^', which are slightly
533 different between Python 2.6 and Python 2.7.
533 different between Python 2.6 and Python 2.7.
534 $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$'
534 $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$'
535 pulling from ../a
535 pulling from ../a
536 searching for changes
536 searching for changes
537 exception from first failed import attempt:
537 exception from first failed import attempt:
538 Traceback (most recent call last):
538 Traceback (most recent call last):
539 SyntaxError: * (glob)
539 SyntaxError: * (glob)
540 exception from second failed import attempt:
540 exception from second failed import attempt:
541 Traceback (most recent call last):
541 Traceback (most recent call last):
542 ImportError: No module named hgext_syntaxerror
542 ImportError: No module named hgext_syntaxerror
543 Traceback (most recent call last):
543 Traceback (most recent call last):
544 HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
544 HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
545 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
545 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
546
546
547 $ echo '[hooks]' > ../a/.hg/hgrc
547 $ echo '[hooks]' > ../a/.hg/hgrc
548 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
548 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
549 $ hg pull ../a
549 $ hg pull ../a
550 pulling from ../a
550 pulling from ../a
551 searching for changes
551 searching for changes
552 hook args:
552 hook args:
553 hooktype preoutgoing
553 hooktype preoutgoing
554 source pull
554 source pull
555 adding changesets
555 adding changesets
556 adding manifests
556 adding manifests
557 adding file changes
557 adding file changes
558 added 1 changesets with 1 changes to 1 files
558 added 1 changesets with 1 changes to 1 files
559 adding remote bookmark quux
559 adding remote bookmark quux
560 (run 'hg update' to get a working copy)
560 (run 'hg update' to get a working copy)
561
561
562 post- python hooks that fail to *run* don't cause an abort
562 post- python hooks that fail to *run* don't cause an abort
563 $ rm ../a/.hg/hgrc
563 $ rm ../a/.hg/hgrc
564 $ echo '[hooks]' > .hg/hgrc
564 $ echo '[hooks]' > .hg/hgrc
565 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
565 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
566 $ hg pull ../a
566 $ hg pull ../a
567 pulling from ../a
567 pulling from ../a
568 searching for changes
568 searching for changes
569 no changes found
569 no changes found
570 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
570 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
571 (run with --traceback for stack trace)
571 (run with --traceback for stack trace)
572
572
573 but post- python hooks that fail to *load* do
573 but post- python hooks that fail to *load* do
574 $ echo '[hooks]' > .hg/hgrc
574 $ echo '[hooks]' > .hg/hgrc
575 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
575 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
576 $ hg pull ../a
576 $ hg pull ../a
577 pulling from ../a
577 pulling from ../a
578 searching for changes
578 searching for changes
579 no changes found
579 no changes found
580 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
580 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
581 [255]
581 [255]
582
582
583 $ echo '[hooks]' > .hg/hgrc
583 $ echo '[hooks]' > .hg/hgrc
584 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
584 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
585 $ hg pull ../a
585 $ hg pull ../a
586 pulling from ../a
586 pulling from ../a
587 searching for changes
587 searching for changes
588 no changes found
588 no changes found
589 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
589 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
590 (run with --traceback for stack trace)
590 (run with --traceback for stack trace)
591 [255]
591 [255]
592
592
593 $ echo '[hooks]' > .hg/hgrc
593 $ echo '[hooks]' > .hg/hgrc
594 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
594 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
595 $ hg pull ../a
595 $ hg pull ../a
596 pulling from ../a
596 pulling from ../a
597 searching for changes
597 searching for changes
598 no changes found
598 no changes found
599 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
599 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
600 [255]
600 [255]
601
601
602 make sure --traceback works
602 make sure --traceback works
603
603
604 $ echo '[hooks]' > .hg/hgrc
604 $ echo '[hooks]' > .hg/hgrc
605 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
605 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
606
606
607 $ echo aa > a
607 $ echo aa > a
608 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
608 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
609 Traceback (most recent call last):
609 Traceback (most recent call last):
610
610
611 $ cd ..
611 $ cd ..
612 $ hg init c
612 $ hg init c
613 $ cd c
613 $ cd c
614
614
615 $ cat > hookext.py <<EOF
615 $ cat > hookext.py <<EOF
616 > def autohook(**args):
616 > def autohook(**args):
617 > print "Automatically installed hook"
617 > print "Automatically installed hook"
618 >
618 >
619 > def reposetup(ui, repo):
619 > def reposetup(ui, repo):
620 > repo.ui.setconfig("hooks", "commit.auto", autohook)
620 > repo.ui.setconfig("hooks", "commit.auto", autohook)
621 > EOF
621 > EOF
622 $ echo '[extensions]' >> .hg/hgrc
622 $ echo '[extensions]' >> .hg/hgrc
623 $ echo 'hookext = hookext.py' >> .hg/hgrc
623 $ echo 'hookext = hookext.py' >> .hg/hgrc
624
624
625 $ touch foo
625 $ touch foo
626 $ hg add foo
626 $ hg add foo
627 $ hg ci -d '0 0' -m 'add foo'
627 $ hg ci -d '0 0' -m 'add foo'
628 Automatically installed hook
628 Automatically installed hook
629 $ echo >> foo
629 $ echo >> foo
630 $ hg ci --debug -d '0 0' -m 'change foo'
630 $ hg ci --debug -d '0 0' -m 'change foo'
631 committing files:
631 committing files:
632 foo
632 foo
633 committing manifest
633 committing manifest
634 committing changelog
634 committing changelog
635 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
635 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
636 calling hook commit.auto: hgext_hookext.autohook
636 calling hook commit.auto: hgext_hookext.autohook
637 Automatically installed hook
637 Automatically installed hook
638
638
639 $ hg showconfig hooks
639 $ hg showconfig hooks
640 hooks.commit.auto=<function autohook at *> (glob)
640 hooks.commit.auto=<function autohook at *> (glob)
641
641
642 test python hook configured with python:[file]:[hook] syntax
642 test python hook configured with python:[file]:[hook] syntax
643
643
644 $ cd ..
644 $ cd ..
645 $ mkdir d
645 $ mkdir d
646 $ cd d
646 $ cd d
647 $ hg init repo
647 $ hg init repo
648 $ mkdir hooks
648 $ mkdir hooks
649
649
650 $ cd hooks
650 $ cd hooks
651 $ cat > testhooks.py <<EOF
651 $ cat > testhooks.py <<EOF
652 > def testhook(**args):
652 > def testhook(**args):
653 > print 'hook works'
653 > print 'hook works'
654 > EOF
654 > EOF
655 $ echo '[hooks]' > ../repo/.hg/hgrc
655 $ echo '[hooks]' > ../repo/.hg/hgrc
656 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
656 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
657
657
658 $ cd ../repo
658 $ cd ../repo
659 $ hg commit -d '0 0'
659 $ hg commit -d '0 0'
660 hook works
660 hook works
661 nothing changed
661 nothing changed
662 [1]
662 [1]
663
663
664 $ echo '[hooks]' > .hg/hgrc
664 $ echo '[hooks]' > .hg/hgrc
665 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
665 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
666 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
666 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
667
667
668 $ hg up null
668 $ hg up null
669 loading update.ne hook failed:
669 loading update.ne hook failed:
670 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
670 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
671 [255]
671 [255]
672
672
673 $ hg id
673 $ hg id
674 loading pre-identify.npmd hook failed:
674 loading pre-identify.npmd hook failed:
675 abort: No module named repo!
675 abort: No module named repo!
676 [255]
676 [255]
677
677
678 $ cd ../../b
678 $ cd ../../b
679
679
680 make sure --traceback works on hook import failure
680 make sure --traceback works on hook import failure
681
681
682 $ cat > importfail.py <<EOF
682 $ cat > importfail.py <<EOF
683 > import somebogusmodule
683 > import somebogusmodule
684 > # dereference something in the module to force demandimport to load it
684 > # dereference something in the module to force demandimport to load it
685 > somebogusmodule.whatever
685 > somebogusmodule.whatever
686 > EOF
686 > EOF
687
687
688 $ echo '[hooks]' > .hg/hgrc
688 $ echo '[hooks]' > .hg/hgrc
689 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
689 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
690
690
691 $ echo a >> a
691 $ echo a >> a
692 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
692 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
693 exception from first failed import attempt:
693 exception from first failed import attempt:
694 Traceback (most recent call last):
694 Traceback (most recent call last):
695 ImportError: No module named somebogusmodule
695 ImportError: No module named somebogusmodule
696 exception from second failed import attempt:
696 exception from second failed import attempt:
697 Traceback (most recent call last):
697 Traceback (most recent call last):
698 ImportError: No module named hgext_importfail
698 ImportError: No module named hgext_importfail
699 Traceback (most recent call last):
699 Traceback (most recent call last):
700 HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
700 HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
701 abort: precommit.importfail hook is invalid: import of "importfail" failed
701 abort: precommit.importfail hook is invalid: import of "importfail" failed
702
702
703 Issue1827: Hooks Update & Commit not completely post operation
703 Issue1827: Hooks Update & Commit not completely post operation
704
704
705 commit and update hooks should run after command completion. The largefiles
705 commit and update hooks should run after command completion. The largefiles
706 use demonstrates a recursive wlock, showing the hook doesn't run until the
706 use demonstrates a recursive wlock, showing the hook doesn't run until the
707 final release (and dirstate flush).
707 final release (and dirstate flush).
708
708
709 $ echo '[hooks]' > .hg/hgrc
709 $ echo '[hooks]' > .hg/hgrc
710 $ echo 'commit = hg id' >> .hg/hgrc
710 $ echo 'commit = hg id' >> .hg/hgrc
711 $ echo 'update = hg id' >> .hg/hgrc
711 $ echo 'update = hg id' >> .hg/hgrc
712 $ echo bb > a
712 $ echo bb > a
713 $ hg ci -ma
713 $ hg ci -ma
714 223eafe2750c tip
714 223eafe2750c tip
715 $ hg up 0 --config extensions.largefiles=
715 $ hg up 0 --config extensions.largefiles=
716 cb9a9f314b8b
716 cb9a9f314b8b
717 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
717 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
718
718
719 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
719 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
720 that is passed to pre/post hooks
720 that is passed to pre/post hooks
721
721
722 $ echo '[hooks]' > .hg/hgrc
722 $ echo '[hooks]' > .hg/hgrc
723 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
723 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
724 $ hg id
724 $ hg id
725 cb9a9f314b8b
725 cb9a9f314b8b
726 $ hg id --verbose
726 $ hg id --verbose
727 calling hook pre-identify: hooktests.verbosehook
727 calling hook pre-identify: hooktests.verbosehook
728 verbose output from hook
728 verbose output from hook
729 cb9a9f314b8b
729 cb9a9f314b8b
730
730
731 Ensure hooks can be prioritized
731 Ensure hooks can be prioritized
732
732
733 $ echo '[hooks]' > .hg/hgrc
733 $ echo '[hooks]' > .hg/hgrc
734 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
734 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
735 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
735 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
736 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
736 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
737 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
737 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
738 $ hg id --verbose
738 $ hg id --verbose
739 calling hook pre-identify.b: hooktests.verbosehook
739 calling hook pre-identify.b: hooktests.verbosehook
740 verbose output from hook
740 verbose output from hook
741 calling hook pre-identify.a: hooktests.verbosehook
741 calling hook pre-identify.a: hooktests.verbosehook
742 verbose output from hook
742 verbose output from hook
743 calling hook pre-identify.c: hooktests.verbosehook
743 calling hook pre-identify.c: hooktests.verbosehook
744 verbose output from hook
744 verbose output from hook
745 cb9a9f314b8b
745 cb9a9f314b8b
746
746
747 new tags must be visible in pretxncommit (issue3210)
747 new tags must be visible in pretxncommit (issue3210)
748
748
749 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
749 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
750 $ hg tag -f foo
750 $ hg tag -f foo
751 ['a', 'foo', 'tip']
751 ['a', 'foo', 'tip']
752
752
753 post-init hooks must not crash (issue4983)
753 post-init hooks must not crash (issue4983)
754 This also creates the `to` repo for the next test block.
754 This also creates the `to` repo for the next test block.
755
755
756 $ cd ..
756 $ cd ..
757 $ cat << EOF >> hgrc-with-post-init-hook
757 $ cat << EOF >> hgrc-with-post-init-hook
758 > [hooks]
758 > [hooks]
759 > post-init = printenv.py post-init
759 > post-init = printenv.py post-init
760 > EOF
760 > EOF
761 $ HGRCPATH=hgrc-with-post-init-hook hg init to
761 $ HGRCPATH=hgrc-with-post-init-hook hg init to
762 post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0
762 post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0
763
763
764 new commits must be visible in pretxnchangegroup (issue3428)
764 new commits must be visible in pretxnchangegroup (issue3428)
765
765
766 $ echo '[hooks]' >> to/.hg/hgrc
766 $ echo '[hooks]' >> to/.hg/hgrc
767 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
767 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
768 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
768 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
769 $ echo a >> to/a
769 $ echo a >> to/a
770 $ hg --cwd to ci -Ama
770 $ hg --cwd to ci -Ama
771 adding a
771 adding a
772 $ hg clone to from
772 $ hg clone to from
773 updating to branch default
773 updating to branch default
774 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
774 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
775 $ echo aa >> from/a
775 $ echo aa >> from/a
776 $ hg --cwd from ci -mb
776 $ hg --cwd from ci -mb
777 $ hg --cwd from push
777 $ hg --cwd from push
778 pushing to $TESTTMP/to (glob)
778 pushing to $TESTTMP/to (glob)
779 searching for changes
779 searching for changes
780 changeset: 0:cb9a9f314b8b
780 changeset: 0:cb9a9f314b8b
781 tag: tip
781 tag: tip
782 user: test
782 user: test
783 date: Thu Jan 01 00:00:00 1970 +0000
783 date: Thu Jan 01 00:00:00 1970 +0000
784 summary: a
784 summary: a
785
785
786 adding changesets
786 adding changesets
787 adding manifests
787 adding manifests
788 adding file changes
788 adding file changes
789 added 1 changesets with 1 changes to 1 files
789 added 1 changesets with 1 changes to 1 files
790 changeset: 1:9836a07b9b9d
790 changeset: 1:9836a07b9b9d
791 tag: tip
791 tag: tip
792 user: test
792 user: test
793 date: Thu Jan 01 00:00:00 1970 +0000
793 date: Thu Jan 01 00:00:00 1970 +0000
794 summary: b
794 summary: b
795
795
796
796
797 pretxnclose hook failure should abort the transaction
797 pretxnclose hook failure should abort the transaction
798
798
799 $ hg init txnfailure
799 $ hg init txnfailure
800 $ cd txnfailure
800 $ cd txnfailure
801 $ touch a && hg commit -Aqm a
801 $ touch a && hg commit -Aqm a
802 $ cat >> .hg/hgrc <<EOF
802 $ cat >> .hg/hgrc <<EOF
803 > [hooks]
803 > [hooks]
804 > pretxnclose.error = exit 1
804 > pretxnclose.error = exit 1
805 > EOF
805 > EOF
806 $ hg strip -r 0 --config extensions.strip=
806 $ hg strip -r 0 --config extensions.strip=
807 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
807 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
808 saved backup bundle to * (glob)
808 saved backup bundle to * (glob)
809 transaction abort!
809 transaction abort!
810 rollback completed
810 rollback completed
811 strip failed, full bundle stored in * (glob)
811 strip failed, full bundle stored in * (glob)
812 abort: pretxnclose.error hook exited with status 1
812 abort: pretxnclose.error hook exited with status 1
813 [255]
813 [255]
814 $ hg recover
814 $ hg recover
815 no interrupted transaction available
815 no interrupted transaction available
816 [1]
816 [1]
817 $ cd ..
817 $ cd ..
818
818
819 Hook from untrusted hgrc are reported as failure
819 Hook from untrusted hgrc are reported as failure
820 ================================================
820 ================================================
821
821
822 $ cat << EOF > $TESTTMP/untrusted.py
822 $ cat << EOF > $TESTTMP/untrusted.py
823 > from mercurial import scmutil, util
823 > from mercurial import scmutil, util
824 > def uisetup(ui):
824 > def uisetup(ui):
825 > class untrustedui(ui.__class__):
825 > class untrustedui(ui.__class__):
826 > def _trusted(self, fp, f):
826 > def _trusted(self, fp, f):
827 > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'):
827 > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'):
828 > return False
828 > return False
829 > return super(untrustedui, self)._trusted(fp, f)
829 > return super(untrustedui, self)._trusted(fp, f)
830 > ui.__class__ = untrustedui
830 > ui.__class__ = untrustedui
831 > EOF
831 > EOF
832 $ cat << EOF >> $HGRCPATH
832 $ cat << EOF >> $HGRCPATH
833 > [extensions]
833 > [extensions]
834 > untrusted=$TESTTMP/untrusted.py
834 > untrusted=$TESTTMP/untrusted.py
835 > EOF
835 > EOF
836 $ hg init untrusted
836 $ hg init untrusted
837 $ cd untrusted
837 $ cd untrusted
838
838
839 Non-blocking hook
839 Non-blocking hook
840 -----------------
840 -----------------
841
841
842 $ cat << EOF >> .hg/hgrc
842 $ cat << EOF >> .hg/hgrc
843 > [hooks]
843 > [hooks]
844 > txnclose.testing=echo txnclose hook called
844 > txnclose.testing=echo txnclose hook called
845 > EOF
845 > EOF
846 $ touch a && hg commit -Aqm a
846 $ touch a && hg commit -Aqm a
847 warning: untrusted hook txnclose not executed
847 warning: untrusted hook txnclose not executed
848 $ hg log
848 $ hg log
849 changeset: 0:3903775176ed
849 changeset: 0:3903775176ed
850 tag: tip
850 tag: tip
851 user: test
851 user: test
852 date: Thu Jan 01 00:00:00 1970 +0000
852 date: Thu Jan 01 00:00:00 1970 +0000
853 summary: a
853 summary: a
854
854
855
855
856 Non-blocking hook
856 Non-blocking hook
857 -----------------
857 -----------------
858
858
859 $ cat << EOF >> .hg/hgrc
859 $ cat << EOF >> .hg/hgrc
860 > [hooks]
860 > [hooks]
861 > pretxnclose.testing=echo pre-txnclose hook called
861 > pretxnclose.testing=echo pre-txnclose hook called
862 > EOF
862 > EOF
863 $ touch b && hg commit -Aqm a
863 $ touch b && hg commit -Aqm a
864 transaction abort!
864 transaction abort!
865 rollback completed
865 rollback completed
866 abort: untrusted hook pretxnclose not executed
866 abort: untrusted hook pretxnclose not executed
867 (see 'hg help config.trusted')
867 (see 'hg help config.trusted')
868 [255]
868 [255]
869 $ hg log
869 $ hg log
870 changeset: 0:3903775176ed
870 changeset: 0:3903775176ed
871 tag: tip
871 tag: tip
872 user: test
872 user: test
873 date: Thu Jan 01 00:00:00 1970 +0000
873 date: Thu Jan 01 00:00:00 1970 +0000
874 summary: a
874 summary: a
875
875
General Comments 0
You need to be logged in to leave comments. Login now