##// END OF EJS Templates
py3: convert kwargs keys back to bytes using pycompat.byteskwargs()
Pulkit Goyal -
r33016:4e6dc34b default
parent child Browse files
Show More
@@ -1,2011 +1,2012 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
36 urlerr = util.urlerr
36 urlerr = util.urlerr
37 urlreq = util.urlreq
37 urlreq = util.urlreq
38
38
39 # Maps bundle version human names to changegroup versions.
39 # Maps bundle version human names to changegroup versions.
40 _bundlespeccgversions = {'v1': '01',
40 _bundlespeccgversions = {'v1': '01',
41 'v2': '02',
41 'v2': '02',
42 'packed1': 's1',
42 'packed1': 's1',
43 'bundle2': '02', #legacy
43 'bundle2': '02', #legacy
44 }
44 }
45
45
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    A bundlespec denotes a well-defined bundle/exchange format whose
    meaning must never change over time, so that bundles produced by a
    newer Mercurial stay readable by older ones.  Its current shape is::

      <compression>-<type>[;<parameter0>[;<parameter1>]]

    where <compression> names a supported compression engine and <type>
    is (currently) a version string.  Everything after a ";" is URI
    encoded, ";"-delimited key=value pairs.

    When ``strict`` is True (the default) the <compression> prefix is
    mandatory; otherwise it may be omitted and sensible defaults are
    inferred.

    When ``externalnames`` is False (the default), the human-centric
    names are converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters).  Compression
    is ``None`` when not in strict mode and no compression was given.

    Raises ``InvalidBundleSpecification`` when the spec is syntactically
    malformed, and ``UnsupportedBundleSpecification`` when the
    compression engine or bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into (version, {key: value}).
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # The value is just the compression name or just the version; the
        # missing half is filled in with defaults (non-strict mode only).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # A packed1 spec may declare the data formats required to apply it.
    # When that metadata is present, reject bundles the local repository
    # cannot represent.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate the human-centric names to internal identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170
170
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of a bundle file and return a matching unbundler.

    Consumes the first bytes of ``fh``.  Raises error.Abort when the data
    is not a recognizable Mercurial bundle.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
    if not header.startswith('HG') and header.startswith('\0'):
        # Headerless changegroup: push the consumed bytes back and treat
        # the stream as an uncompressed HG10 bundle.
        fh = changegroup.headerlessfixup(fh, header)
        header = "HG10"
        alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal bundle compression type to its spec name, or
        # None when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Derive the spec version from the changegroup part, if any.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251
251
def _computeoutgoing(repo, heads, common):
    """Compute which revs are outgoing given common nodes and heads.

    Kept as a standalone function so extensions can access the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        # Drop common nodes the local changelog does not actually have.
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    else:
        common = [nullid]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
270
270
271 def _forcebundle1(op):
271 def _forcebundle1(op):
272 """return true if a pull/push must use bundle1
272 """return true if a pull/push must use bundle1
273
273
274 This function is used to allow testing of the older bundle version"""
274 This function is used to allow testing of the older bundle version"""
275 ui = op.repo.ui
275 ui = op.repo.ui
276 forcebundle1 = False
276 forcebundle1 = False
277 # The goal is this config is to allow developer to choose the bundle
277 # The goal is this config is to allow developer to choose the bundle
278 # version used during exchanged. This is especially handy during test.
278 # version used during exchanged. This is especially handy during test.
279 # Value is a list of bundle version to be picked from, highest version
279 # Value is a list of bundle version to be picked from, highest version
280 # should be used.
280 # should be used.
281 #
281 #
282 # developer config: devel.legacy.exchange
282 # developer config: devel.legacy.exchange
283 exchange = ui.configlist('devel', 'legacy.exchange')
283 exchange = ui.configlist('devel', 'legacy.exchange')
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 return forcebundle1 or not op.remote.capable('bundle2')
285 return forcebundle1 or not op.remote.capable('bundle2')
286
286
class pushoperation(object):
    """State container for a single push operation.

    Carries push-related state and a few commonly derived values.  A new
    pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through
        # bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: remote heads known locally; None if branch is new
        # - newheads: the new remote heads (known locally) with outgoing
        #   pushed
        # - unsyncedheads: remote heads unknown locally
        # - discardedheads: remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no revs targeted for push: every common head stays relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # We want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out).
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # While trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # So we can pick:
        # * the part of missingheads already in common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * the commonheads that are parents of missing changesets
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
411
411
412
412
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.  Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    if pushop.remote.local():
        # Pushing over the filesystem: refuse when the destination cannot
        # represent this repository's requirements.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # There are two ways to push to a remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or
        # other things requiring the wlock. Take it now to ensure proper
        # ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # The source repo cannot be locked.  Do not abort the push; just
        # disable the local phase synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # Old-style push requires locking the remote repository.
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if not _forcebundle1(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
        if pushop.trmanager:
            pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
497
497
# list of steps to perform discovery before push, in execution order
# (names are keys into pushdiscoverymapping; populated by the
# @pushdiscovery decorator below)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
505
505
def pushdiscovery(stepname):
    """Register a function that performs discovery before push.

    Decorating a function records it under ``stepname`` in
    ``pushdiscoverymapping`` and appends the name to
    ``pushdiscoveryorder``, so steps run in registration order (this
    may matter).

    Only use this decorator for a brand new step; to wrap a step from
    an extension, change the pushdiscovery dictionary directly.
    """
    def register(discoveryfunc):
        # a step name may be registered only once
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = discoveryfunc
        pushdiscoveryorder.append(stepname)
        return discoveryfunc
    return register
521
521
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
527
527
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """Discover which changesets need to be pushed.

    Fills in ``pushop.outgoing``, ``pushop.remoteheads`` and
    ``pushop.incoming`` from the common/incoming/outgoing computation.
    """
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(
        pushop.repo, pushop.remote, onlyheads=pushop.revs,
        commoninc=commoninc, force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
540
540
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills in ``pushop.outdatedphases`` (heads to turn public if the
    changeset push succeeds) and ``pushop.fallbackoutdatedphases``
    (heads to turn public even if the changeset push fails).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing server: only already-public changesets count
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
589
589
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """Collect obsolescence markers relevant to the pushed heads."""
    repo = pushop.repo
    # exchange must be enabled, there must be markers to send, and the
    # remote must advertise the 'obsolete' pushkey namespace
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
600
600
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """Compute which bookmark updates to send to the remote.

    Compares local and remote bookmarks and fills
    ``pushop.outbookmarks`` with ``(name, old-remote-hex, new-hex)``
    entries. Sets ``pushop.bkresult = 2`` when an explicitly requested
    bookmark exists on neither side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to the pushed subset of history
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user explicitly asked to push (names expanded)
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # comparebookmarks may yield None for a missing side
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    # bookmarks that moved forward locally
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        # anything left was requested but matched nothing on either side
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
665
665
def _pushcheckoutgoing(pushop):
    """Validate the outgoing changesets before sending them.

    Returns False when there is nothing to push. Otherwise (unless the
    push is forced) aborts if an outgoing head is obsolete or troubled,
    runs the new-head checks, and returns True.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are assigned to short names to stay within
            # the 80-character line limit below
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If there is at least one obsolete or unstable
            # changeset in missing, at least one of the
            # missingheads will be obsolete or unstable.
            # So checking heads only is ok.
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
696
696
# List of names of steps to perform for an outgoing bundle2, order matters.
# (names are keys into b2partsgenmapping; populated by the
# @b2partsgenerator decorator below)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
704
704
def b2partsgenerator(stepname, idx=None):
    """Register a function that generates a bundle2 part for push.

    The decorated function is stored under ``stepname`` in
    ``b2partsgenmapping`` and its name is added to ``b2partsgenorder``
    (appended, or inserted at position ``idx`` when given); generators
    therefore run in registration order, which may matter.

    Only use this decorator for a brand new step; to wrap a step from
    an extension, change the b2partsgenmapping dictionary directly.
    """
    def register(partgenfunc):
        # a step name may be registered only once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = partgenfunc
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return partgenfunc
    return register
723
723
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        if not allowunrelated:
            # legacy check: the full set of remote heads must be unchanged
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # newer check: only heads actually touched by this push need
            # to be unchanged on the server
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    # remote heads this push replaces or removes
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
746
746
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    # fall back to changegroup format '01' unless a newer common version
    # is negotiated below
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
787
787
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one pushkey part per head to turn public, and returns a reply
    handler that reports heads the server refused or ignored.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # 'not in' instead of 'not ... in' for PEP 8 (E713) and consistency
    # with _pushb2bookmarks
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, so failures can be mapped back to a head
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """Warn about heads the server ignored or failed to publish."""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
828
828
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """Add an obsolescence-markers part to the outgoing bundle2."""
    if 'obsmarkers' in pushop.stepsdone:
        return
    # skip when the two sides share no obsmarker format version
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
840
840
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Adds one pushkey part per entry in ``pushop.outbookmarks`` and
    returns a reply handler that reports per-bookmark success/failure.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples for reply/failure handling
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty 'old' means the bookmark is new on the remote; empty
        # 'new' means it is being deleted there
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """Report the outcome of every bookmark pushkey part."""
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
892
892
893
893
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback requires a transaction manager and an opt-in config
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator; those returning a callable
    # want to process the server's reply bundle afterwards
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (the replycaps part alone doesn't count)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # apply the server's reply bundle locally
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        # delegate to the per-part failure callback when one is registered
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
942
942
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    fastpathok = (pushop.revs is None
                  and not outgoing.excluded
                  and not pushop.repo.changelog.filteredrevs)
    if fastpathok:
        # pushing everything: the fast path is safe, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo, outgoing, bundler, 'push',
                                   fastpath=True)
    else:
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # The local repo finds heads on the server and works out which revs
        # it must push. Once the revs are transferred, if the server finds
        # it has different heads (someone else won the commit/push race),
        # the server aborts.
        remoteheads = ['force'] if pushop.force else pushop.remoteheads
        # ssh: returns remote's addchangegroup()
        # http: returns remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating the remote head count change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
989
989
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases  # server supports phases
        and pushop.cgresult is None  # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and the remote supports phases
        # - and no changeset was pushed
        # - and the remote is publishing
        # we may be in the issue 3871 case!
        # Drop the phase synchronisation that would otherwise be done by
        # courtesy, to publish changesets that may be locally draft on the
        # remote.
        remotephases = {'publishing': 'True'}
    if not remotephases:
        # old server, or public-only reply from a non-publishing remote:
        # don't push any phase data as there is nothing to push
        _localphasemove(pushop, cheads)
    else:
        pheads, droots = phases.analyzeremotephases(pushop.repo, cheads,
                                                    remotephases)
        ### apply the remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else:  # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### apply the local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fall back to the independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1045
1045
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # The repo is not locked, so do not change any phases!
    # Inform the user that phases should have been moved when applicable.
    actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
    if actualmoves:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1062
1062
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        # reverse sort to ensure we end with dump0
        results = [remote.pushkey('obsolete', key, '', remotedata[key])
                   for key in sorted(remotedata, reverse=True)]
        if not all(results):
            repo.ui.warn(_('failed to push some obsolete markers!\n'))
1081
1081
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # a missing old value means a brand new bookmark; a missing new
        # value means the bookmark is being removed
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value from an invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1103
1103
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revisions we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force the pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of changesets common to local and remote before the pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # steps already performed
        self.stepsdone = set()
        # whether we attempted a clone from pre-generated bundles
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # we pulled a specific subset; sync on this subset
            return self.heads
        # we pulled everything possible; sync on everything common
        seen = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in seen)
        return subset

    @util.propertycache
    def canusebundle2(self):
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1174
1174
class transactionmanager(object):
    """An object managing the life cycle of a transaction.

    The transaction is created on demand, and the appropriate hooks are
    called when the transaction is closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1204
1204
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **opargs)
    if pullop.remote.local():
        # the destination must understand every requirement of the source
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1260
1260
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for a function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a
    step from an extension, change the pulldiscovery dictionary directly."""
    def register(func):
        # each step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1284
1284
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    # steps run in registration order; extensions may have wrapped entries
    # in pulldiscoverymapping
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
1290
1290
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in the bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmarks already known; nothing to fetch
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but let's be nice
        # with new implementations.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1304
1304
1305
1305
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, drop it from the unknown
        # remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but
        # locally hidden" situations. We do not perform discovery on the
        # unfiltered repository because it ends up doing a pathological
        # amount of round trips for a huge amount of changesets we do not
        # care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for node in rheads:
            if node not in nm:
                # unknown locally: genuinely missing remote head
                filteredrheads.append(node)
            elif node not in scommon:
                # known locally (possibly hidden): treat as common
                common.append(node)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1343
1343
1344 def _pullbundle2(pullop):
1344 def _pullbundle2(pullop):
1345 """pull data using bundle2
1345 """pull data using bundle2
1346
1346
1347 For now, the only supported data are changegroup."""
1347 For now, the only supported data are changegroup."""
1348 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1348 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1349
1349
1350 # At the moment we don't do stream clones over bundle2. If that is
1350 # At the moment we don't do stream clones over bundle2. If that is
1351 # implemented then here's where the check for that will go.
1351 # implemented then here's where the check for that will go.
1352 streaming = False
1352 streaming = False
1353
1353
1354 # pulling changegroup
1354 # pulling changegroup
1355 pullop.stepsdone.add('changegroup')
1355 pullop.stepsdone.add('changegroup')
1356
1356
1357 kwargs['common'] = pullop.common
1357 kwargs['common'] = pullop.common
1358 kwargs['heads'] = pullop.heads or pullop.rheads
1358 kwargs['heads'] = pullop.heads or pullop.rheads
1359 kwargs['cg'] = pullop.fetch
1359 kwargs['cg'] = pullop.fetch
1360 if 'listkeys' in pullop.remotebundle2caps:
1360 if 'listkeys' in pullop.remotebundle2caps:
1361 kwargs['listkeys'] = ['phases']
1361 kwargs['listkeys'] = ['phases']
1362 if pullop.remotebookmarks is None:
1362 if pullop.remotebookmarks is None:
1363 # make sure to always includes bookmark data when migrating
1363 # make sure to always includes bookmark data when migrating
1364 # `hg incoming --bundle` to using this function.
1364 # `hg incoming --bundle` to using this function.
1365 kwargs['listkeys'].append('bookmarks')
1365 kwargs['listkeys'].append('bookmarks')
1366
1366
1367 # If this is a full pull / clone and the server supports the clone bundles
1367 # If this is a full pull / clone and the server supports the clone bundles
1368 # feature, tell the server whether we attempted a clone bundle. The
1368 # feature, tell the server whether we attempted a clone bundle. The
1369 # presence of this flag indicates the client supports clone bundles. This
1369 # presence of this flag indicates the client supports clone bundles. This
1370 # will enable the server to treat clients that support clone bundles
1370 # will enable the server to treat clients that support clone bundles
1371 # differently from those that don't.
1371 # differently from those that don't.
1372 if (pullop.remote.capable('clonebundles')
1372 if (pullop.remote.capable('clonebundles')
1373 and pullop.heads is None and list(pullop.common) == [nullid]):
1373 and pullop.heads is None and list(pullop.common) == [nullid]):
1374 kwargs['cbattempted'] = pullop.clonebundleattempted
1374 kwargs['cbattempted'] = pullop.clonebundleattempted
1375
1375
1376 if streaming:
1376 if streaming:
1377 pullop.repo.ui.status(_('streaming all changes\n'))
1377 pullop.repo.ui.status(_('streaming all changes\n'))
1378 elif not pullop.fetch:
1378 elif not pullop.fetch:
1379 pullop.repo.ui.status(_("no changes found\n"))
1379 pullop.repo.ui.status(_("no changes found\n"))
1380 pullop.cgresult = 0
1380 pullop.cgresult = 0
1381 else:
1381 else:
1382 if pullop.heads is None and list(pullop.common) == [nullid]:
1382 if pullop.heads is None and list(pullop.common) == [nullid]:
1383 pullop.repo.ui.status(_("requesting all changes\n"))
1383 pullop.repo.ui.status(_("requesting all changes\n"))
1384 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1384 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1385 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1385 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1386 if obsolete.commonversion(remoteversions) is not None:
1386 if obsolete.commonversion(remoteversions) is not None:
1387 kwargs['obsmarkers'] = True
1387 kwargs['obsmarkers'] = True
1388 pullop.stepsdone.add('obsmarkers')
1388 pullop.stepsdone.add('obsmarkers')
1389 _pullbundle2extraprepare(pullop, kwargs)
1389 _pullbundle2extraprepare(pullop, kwargs)
1390 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1390 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1391 try:
1391 try:
1392 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1392 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1393 except bundle2.AbortFromPart as exc:
1393 except bundle2.AbortFromPart as exc:
1394 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1394 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1395 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1395 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1396 except error.BundleValueError as exc:
1396 except error.BundleValueError as exc:
1397 raise error.Abort(_('missing support for %s') % exc)
1397 raise error.Abort(_('missing support for %s') % exc)
1398
1398
1399 if pullop.fetch:
1399 if pullop.fetch:
1400 results = [cg['return'] for cg in op.records['changegroup']]
1400 results = [cg['return'] for cg in op.records['changegroup']]
1401 pullop.cgresult = changegroup.combineresults(results)
1401 pullop.cgresult = changegroup.combineresults(results)
1402
1402
1403 # processing phases change
1403 # processing phases change
1404 for namespace, value in op.records['listkeys']:
1404 for namespace, value in op.records['listkeys']:
1405 if namespace == 'phases':
1405 if namespace == 'phases':
1406 _pullapplyphases(pullop, value)
1406 _pullapplyphases(pullop, value)
1407
1407
1408 # processing bookmark update
1408 # processing bookmark update
1409 for namespace, value in op.records['listkeys']:
1409 for namespace, value in op.records['listkeys']:
1410 if namespace == 'bookmarks':
1410 if namespace == 'bookmarks':
1411 pullop.remotebookmarks = value
1411 pullop.remotebookmarks = value
1412
1412
1413 # bookmark data were either already there or pulled in the bundle
1413 # bookmark data were either already there or pulled in the bundle
1414 if pullop.remotebookmarks is not None:
1414 if pullop.remotebookmarks is not None:
1415 _pullbookmarks(pullop)
1415 _pullbookmarks(pullop)
1416
1416
1417 def _pullbundle2extraprepare(pullop, kwargs):
1417 def _pullbundle2extraprepare(pullop, kwargs):
1418 """hook function so that extensions can extend the getbundle call"""
1418 """hook function so that extensions can extend the getbundle call"""
1419 pass
1419 pass
1420
1420
def _pullchangeset(pullop):
    """Pull a changegroup from the remote and apply it into the local repo.

    The transaction is opened as late as possible so that none is created
    when there is nothing to pull (opening one for nothing would also break
    any future useful rollback call).
    """
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    repo = pullop.repo
    remote = pullop.remote
    if not pullop.fetch:
        repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = cg.apply(repo, tr, 'pull', remote.url())
1453
1453
def _pullphase(pullop):
    # Fetch the remote phase state (pushkey 'phases' namespace) and apply
    # any resulting phase movements locally, unless already done.
    if 'phases' not in pullop.stepsdone:
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1460
1460
def _pullapplyphases(pullop, remotephases):
    """Apply phase movements implied by the observed remote state."""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    serverpublishing = bool(remotephases.get('publishing', False))
    if remotephases and not serverpublishing:
        # Modern, non-publishing remote: split pulled heads into those that
        # become public and those that stay draft.
        publicheads, _dropped = phases.analyzeremotephases(pullop.repo,
                                                           pullop.pulledsubset,
                                                           remotephases)
        draftheads = pullop.pulledsubset
    else:
        # Old remote, or a publishing one: every common changeset
        # should be seen as public.
        publicheads = pullop.pulledsubset
        draftheads = []
    unfi = pullop.repo.unfiltered()
    getphase = unfi._phasecache.phase
    getrev = unfi.changelog.nodemap.get

    def _advance(targetphase, nodes):
        # Only move changesets whose local phase is stricter than the
        # target; the transaction is opened lazily, only when needed.
        pending = [node for node in nodes
                   if getphase(unfi, getrev(node)) > targetphase]
        if pending:
            tr = pullop.gettransaction()
            phases.advanceboundary(pullop.repo, tr, targetphase, pending)

    _advance(phases.public, publicheads)
    _advance(phases.draft, draftheads)
1495
1495
def _pullbookmarks(pullop):
    """Update the local bookmarks from the remote bookmark information."""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    # remote bookmark values arrive hex-encoded; convert to binary nodes
    remotemarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotemarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1508
1508
def _pullobsolete(pullop):
    """Fetch obsolescence markers from the remote, when the exchange
    option is enabled.

    Returns the pull transaction when a new one was created so the calling
    code knows about it (``None`` otherwise).  Exists mostly as an override
    point for experimentation.
    """
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if not obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        return tr
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' not in remoteobs:
        return tr
    tr = pullop.gettransaction()
    markers = []
    # newer dumps first; every 'dump*' key holds a base85-encoded blob
    for key in sorted(remoteobs, reverse=True):
        if key.startswith('dump'):
            data = util.b85decode(remoteobs[key])
            version, newmarks = obsolete._readmarkers(data)
            markers.extend(newmarks)
    if markers:
        pullop.repo.obsstore.add(tr, markers)
    pullop.repo.invalidatevolatilesets()
    return tr
1536
1536
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1543
1543
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1551
1551
def getbundle2partsgenerator(stepname, idx=None):
    """Decorator registering a bundle2-part generator for getbundle.

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered step list (or inserted at
    ``idx`` when given).  Beware that decorated functions are registered in
    definition order (this may matter).

    Only use this decorator for new steps; to wrap an existing step from an
    extension, attack the getbundle2partsmapping dictionary directly.
    """
    def register(partfn):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = partfn
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return partfn
    return register
1570
1570
def bundle2requested(bundlecaps):
    # A bundle2 stream was requested iff any advertised capability
    # starts with 'HG2' (e.g. 'HG20'); None means no caps at all.
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1575
1575
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    # **kwargs arrive with str keys on Python 3; normalize back to bytes
    kwargs = pycompat.byteskwargs(kwargs)
    if not bundle2requested(bundlecaps):
        # bundle10 case
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case: decode the bundle2 capabilities advertised by the client
    b2caps = {}
    for cap in bundlecaps:
        if cap.startswith('bundle2='):
            blob = urlreq.unquote(cap[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # let every registered step contribute its part(s), in order
    for stepname in getbundle2partsorder:
        partgen = getbundle2partsmapping[stepname]
        partgen(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
                **kwargs)

    return bundler.getchunks()
1615
1616
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True):
        return
    # Pick the newest changegroup version both sides support; '01' is the
    # fallback when the client advertised nothing usable.
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    outgoing = _computeoutgoing(repo, heads, common)
    cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                            bundlecaps=bundlecaps,
                                            version=version)
    if not cg:
        return
    part = bundler.newpart('changegroup', data=cg)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1643
1644
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    for namespace in kwargs.get('listkeys', ()):
        # one 'listkeys' part per requested pushkey namespace
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1654
1655
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # markers relevant to every changeset reachable from the heads
    subset = [ctx.node() for ctx in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1666
1667
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Only send when changesets are being exchanged AND the client
    # advertised support for this part.
    if not kwargs.get('cg', True) or 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1686
1687
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    return {book: node for book, node in bookmod.listbinbookmarks(repo)}
1697
1698
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.

    ``their_heads`` is either the literal ``['force']`` (skip the check),
    the exact list of heads observed at bundle-creation time, or
    ``['hashed', digest]`` with the sha1 of the sorted, concatenated head
    nodes.  Raises PushRaced if the repository's heads no longer match.
    """
    heads = repo.heads()
    # Head nodes are binary strings; join with a bytes literal so this
    # works on Python 3 as well (''.join over bytes raises TypeError there,
    # while b''.join is identical to ''.join on Python 2).
    heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1711
1712
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
                                      False)
    # output over HTTP must always be captured so it can be relayed
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                r = cg.apply(repo, tr, source, url)
        else:
            r = None
            try:
                # Locks and the transaction are taken lazily: only the first
                # call to gettransaction() acquires them (bundle2 streams may
                # not need a transaction at all).
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # capture further output into an 'output' reply part
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # mark the failure so upper layers know it happened while
                # unbundling, and salvage already-generated reply output
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # release in reverse acquisition order: tr, lock, wlock
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1784
1785
1785 def _maybeapplyclonebundle(pullop):
1786 def _maybeapplyclonebundle(pullop):
1786 """Apply a clone bundle from a remote, if possible."""
1787 """Apply a clone bundle from a remote, if possible."""
1787
1788
1788 repo = pullop.repo
1789 repo = pullop.repo
1789 remote = pullop.remote
1790 remote = pullop.remote
1790
1791
1791 if not repo.ui.configbool('ui', 'clonebundles', True):
1792 if not repo.ui.configbool('ui', 'clonebundles', True):
1792 return
1793 return
1793
1794
1794 # Only run if local repo is empty.
1795 # Only run if local repo is empty.
1795 if len(repo):
1796 if len(repo):
1796 return
1797 return
1797
1798
1798 if pullop.heads:
1799 if pullop.heads:
1799 return
1800 return
1800
1801
1801 if not remote.capable('clonebundles'):
1802 if not remote.capable('clonebundles'):
1802 return
1803 return
1803
1804
1804 res = remote._call('clonebundles')
1805 res = remote._call('clonebundles')
1805
1806
1806 # If we call the wire protocol command, that's good enough to record the
1807 # If we call the wire protocol command, that's good enough to record the
1807 # attempt.
1808 # attempt.
1808 pullop.clonebundleattempted = True
1809 pullop.clonebundleattempted = True
1809
1810
1810 entries = parseclonebundlesmanifest(repo, res)
1811 entries = parseclonebundlesmanifest(repo, res)
1811 if not entries:
1812 if not entries:
1812 repo.ui.note(_('no clone bundles available on remote; '
1813 repo.ui.note(_('no clone bundles available on remote; '
1813 'falling back to regular clone\n'))
1814 'falling back to regular clone\n'))
1814 return
1815 return
1815
1816
1816 entries = filterclonebundleentries(repo, entries)
1817 entries = filterclonebundleentries(repo, entries)
1817 if not entries:
1818 if not entries:
1818 # There is a thundering herd concern here. However, if a server
1819 # There is a thundering herd concern here. However, if a server
1819 # operator doesn't advertise bundles appropriate for its clients,
1820 # operator doesn't advertise bundles appropriate for its clients,
1820 # they deserve what's coming. Furthermore, from a client's
1821 # they deserve what's coming. Furthermore, from a client's
1821 # perspective, no automatic fallback would mean not being able to
1822 # perspective, no automatic fallback would mean not being able to
1822 # clone!
1823 # clone!
1823 repo.ui.warn(_('no compatible clone bundles available on server; '
1824 repo.ui.warn(_('no compatible clone bundles available on server; '
1824 'falling back to regular clone\n'))
1825 'falling back to regular clone\n'))
1825 repo.ui.warn(_('(you may want to report this to the server '
1826 repo.ui.warn(_('(you may want to report this to the server '
1826 'operator)\n'))
1827 'operator)\n'))
1827 return
1828 return
1828
1829
1829 entries = sortclonebundleentries(repo.ui, entries)
1830 entries = sortclonebundleentries(repo.ui, entries)
1830
1831
1831 url = entries[0]['URL']
1832 url = entries[0]['URL']
1832 repo.ui.status(_('applying clone bundle from %s\n') % url)
1833 repo.ui.status(_('applying clone bundle from %s\n') % url)
1833 if trypullbundlefromurl(repo.ui, repo, url):
1834 if trypullbundlefromurl(repo.ui, repo, url):
1834 repo.ui.status(_('finished applying clone bundle\n'))
1835 repo.ui.status(_('finished applying clone bundle\n'))
1835 # Bundle failed.
1836 # Bundle failed.
1836 #
1837 #
1837 # We abort by default to avoid the thundering herd of
1838 # We abort by default to avoid the thundering herd of
1838 # clients flooding a server that was expecting expensive
1839 # clients flooding a server that was expecting expensive
1839 # clone load to be offloaded.
1840 # clone load to be offloaded.
1840 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1841 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1841 repo.ui.warn(_('falling back to normal clone\n'))
1842 repo.ui.warn(_('falling back to normal clone\n'))
1842 else:
1843 else:
1843 raise error.Abort(_('error applying bundle'),
1844 raise error.Abort(_('error applying bundle'),
1844 hint=_('if this error persists, consider contacting '
1845 hint=_('if this error persists, consider contacting '
1845 'the server operator or disable clone '
1846 'the server operator or disable clone '
1846 'bundles via '
1847 'bundles via '
1847 '"--config ui.clonebundles=false"'))
1848 '"--config ui.clonebundles=false"'))
1848
1849
1849 def parseclonebundlesmanifest(repo, s):
1850 def parseclonebundlesmanifest(repo, s):
1850 """Parses the raw text of a clone bundles manifest.
1851 """Parses the raw text of a clone bundles manifest.
1851
1852
1852 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1853 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1853 to the URL and other keys are the attributes for the entry.
1854 to the URL and other keys are the attributes for the entry.
1854 """
1855 """
1855 m = []
1856 m = []
1856 for line in s.splitlines():
1857 for line in s.splitlines():
1857 fields = line.split()
1858 fields = line.split()
1858 if not fields:
1859 if not fields:
1859 continue
1860 continue
1860 attrs = {'URL': fields[0]}
1861 attrs = {'URL': fields[0]}
1861 for rawattr in fields[1:]:
1862 for rawattr in fields[1:]:
1862 key, value = rawattr.split('=', 1)
1863 key, value = rawattr.split('=', 1)
1863 key = urlreq.unquote(key)
1864 key = urlreq.unquote(key)
1864 value = urlreq.unquote(value)
1865 value = urlreq.unquote(value)
1865 attrs[key] = value
1866 attrs[key] = value
1866
1867
1867 # Parse BUNDLESPEC into components. This makes client-side
1868 # Parse BUNDLESPEC into components. This makes client-side
1868 # preferences easier to specify since you can prefer a single
1869 # preferences easier to specify since you can prefer a single
1869 # component of the BUNDLESPEC.
1870 # component of the BUNDLESPEC.
1870 if key == 'BUNDLESPEC':
1871 if key == 'BUNDLESPEC':
1871 try:
1872 try:
1872 comp, version, params = parsebundlespec(repo, value,
1873 comp, version, params = parsebundlespec(repo, value,
1873 externalnames=True)
1874 externalnames=True)
1874 attrs['COMPRESSION'] = comp
1875 attrs['COMPRESSION'] = comp
1875 attrs['VERSION'] = version
1876 attrs['VERSION'] = version
1876 except error.InvalidBundleSpecification:
1877 except error.InvalidBundleSpecification:
1877 pass
1878 pass
1878 except error.UnsupportedBundleSpecification:
1879 except error.UnsupportedBundleSpecification:
1879 pass
1880 pass
1880
1881
1881 m.append(attrs)
1882 m.append(attrs)
1882
1883
1883 return m
1884 return m
1884
1885
1885 def filterclonebundleentries(repo, entries):
1886 def filterclonebundleentries(repo, entries):
1886 """Remove incompatible clone bundle manifest entries.
1887 """Remove incompatible clone bundle manifest entries.
1887
1888
1888 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1889 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1889 and returns a new list consisting of only the entries that this client
1890 and returns a new list consisting of only the entries that this client
1890 should be able to apply.
1891 should be able to apply.
1891
1892
1892 There is no guarantee we'll be able to apply all returned entries because
1893 There is no guarantee we'll be able to apply all returned entries because
1893 the metadata we use to filter on may be missing or wrong.
1894 the metadata we use to filter on may be missing or wrong.
1894 """
1895 """
1895 newentries = []
1896 newentries = []
1896 for entry in entries:
1897 for entry in entries:
1897 spec = entry.get('BUNDLESPEC')
1898 spec = entry.get('BUNDLESPEC')
1898 if spec:
1899 if spec:
1899 try:
1900 try:
1900 parsebundlespec(repo, spec, strict=True)
1901 parsebundlespec(repo, spec, strict=True)
1901 except error.InvalidBundleSpecification as e:
1902 except error.InvalidBundleSpecification as e:
1902 repo.ui.debug(str(e) + '\n')
1903 repo.ui.debug(str(e) + '\n')
1903 continue
1904 continue
1904 except error.UnsupportedBundleSpecification as e:
1905 except error.UnsupportedBundleSpecification as e:
1905 repo.ui.debug('filtering %s because unsupported bundle '
1906 repo.ui.debug('filtering %s because unsupported bundle '
1906 'spec: %s\n' % (entry['URL'], str(e)))
1907 'spec: %s\n' % (entry['URL'], str(e)))
1907 continue
1908 continue
1908
1909
1909 if 'REQUIRESNI' in entry and not sslutil.hassni:
1910 if 'REQUIRESNI' in entry and not sslutil.hassni:
1910 repo.ui.debug('filtering %s because SNI not supported\n' %
1911 repo.ui.debug('filtering %s because SNI not supported\n' %
1911 entry['URL'])
1912 entry['URL'])
1912 continue
1913 continue
1913
1914
1914 newentries.append(entry)
1915 newentries.append(entry)
1915
1916
1916 return newentries
1917 return newentries
1917
1918
1918 class clonebundleentry(object):
1919 class clonebundleentry(object):
1919 """Represents an item in a clone bundles manifest.
1920 """Represents an item in a clone bundles manifest.
1920
1921
1921 This rich class is needed to support sorting since sorted() in Python 3
1922 This rich class is needed to support sorting since sorted() in Python 3
1922 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1923 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1923 won't work.
1924 won't work.
1924 """
1925 """
1925
1926
1926 def __init__(self, value, prefers):
1927 def __init__(self, value, prefers):
1927 self.value = value
1928 self.value = value
1928 self.prefers = prefers
1929 self.prefers = prefers
1929
1930
1930 def _cmp(self, other):
1931 def _cmp(self, other):
1931 for prefkey, prefvalue in self.prefers:
1932 for prefkey, prefvalue in self.prefers:
1932 avalue = self.value.get(prefkey)
1933 avalue = self.value.get(prefkey)
1933 bvalue = other.value.get(prefkey)
1934 bvalue = other.value.get(prefkey)
1934
1935
1935 # Special case for b missing attribute and a matches exactly.
1936 # Special case for b missing attribute and a matches exactly.
1936 if avalue is not None and bvalue is None and avalue == prefvalue:
1937 if avalue is not None and bvalue is None and avalue == prefvalue:
1937 return -1
1938 return -1
1938
1939
1939 # Special case for a missing attribute and b matches exactly.
1940 # Special case for a missing attribute and b matches exactly.
1940 if bvalue is not None and avalue is None and bvalue == prefvalue:
1941 if bvalue is not None and avalue is None and bvalue == prefvalue:
1941 return 1
1942 return 1
1942
1943
1943 # We can't compare unless attribute present on both.
1944 # We can't compare unless attribute present on both.
1944 if avalue is None or bvalue is None:
1945 if avalue is None or bvalue is None:
1945 continue
1946 continue
1946
1947
1947 # Same values should fall back to next attribute.
1948 # Same values should fall back to next attribute.
1948 if avalue == bvalue:
1949 if avalue == bvalue:
1949 continue
1950 continue
1950
1951
1951 # Exact matches come first.
1952 # Exact matches come first.
1952 if avalue == prefvalue:
1953 if avalue == prefvalue:
1953 return -1
1954 return -1
1954 if bvalue == prefvalue:
1955 if bvalue == prefvalue:
1955 return 1
1956 return 1
1956
1957
1957 # Fall back to next attribute.
1958 # Fall back to next attribute.
1958 continue
1959 continue
1959
1960
1960 # If we got here we couldn't sort by attributes and prefers. Fall
1961 # If we got here we couldn't sort by attributes and prefers. Fall
1961 # back to index order.
1962 # back to index order.
1962 return 0
1963 return 0
1963
1964
1964 def __lt__(self, other):
1965 def __lt__(self, other):
1965 return self._cmp(other) < 0
1966 return self._cmp(other) < 0
1966
1967
1967 def __gt__(self, other):
1968 def __gt__(self, other):
1968 return self._cmp(other) > 0
1969 return self._cmp(other) > 0
1969
1970
1970 def __eq__(self, other):
1971 def __eq__(self, other):
1971 return self._cmp(other) == 0
1972 return self._cmp(other) == 0
1972
1973
1973 def __le__(self, other):
1974 def __le__(self, other):
1974 return self._cmp(other) <= 0
1975 return self._cmp(other) <= 0
1975
1976
1976 def __ge__(self, other):
1977 def __ge__(self, other):
1977 return self._cmp(other) >= 0
1978 return self._cmp(other) >= 0
1978
1979
1979 def __ne__(self, other):
1980 def __ne__(self, other):
1980 return self._cmp(other) != 0
1981 return self._cmp(other) != 0
1981
1982
1982 def sortclonebundleentries(ui, entries):
1983 def sortclonebundleentries(ui, entries):
1983 prefers = ui.configlist('ui', 'clonebundleprefers')
1984 prefers = ui.configlist('ui', 'clonebundleprefers')
1984 if not prefers:
1985 if not prefers:
1985 return list(entries)
1986 return list(entries)
1986
1987
1987 prefers = [p.split('=', 1) for p in prefers]
1988 prefers = [p.split('=', 1) for p in prefers]
1988
1989
1989 items = sorted(clonebundleentry(v, prefers) for v in entries)
1990 items = sorted(clonebundleentry(v, prefers) for v in entries)
1990 return [i.value for i in items]
1991 return [i.value for i in items]
1991
1992
1992 def trypullbundlefromurl(ui, repo, url):
1993 def trypullbundlefromurl(ui, repo, url):
1993 """Attempt to apply a bundle from a URL."""
1994 """Attempt to apply a bundle from a URL."""
1994 with repo.lock(), repo.transaction('bundleurl') as tr:
1995 with repo.lock(), repo.transaction('bundleurl') as tr:
1995 try:
1996 try:
1996 fh = urlmod.open(ui, url)
1997 fh = urlmod.open(ui, url)
1997 cg = readbundle(ui, fh, 'stream')
1998 cg = readbundle(ui, fh, 'stream')
1998
1999
1999 if isinstance(cg, bundle2.unbundle20):
2000 if isinstance(cg, bundle2.unbundle20):
2000 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2001 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2001 elif isinstance(cg, streamclone.streamcloneapplier):
2002 elif isinstance(cg, streamclone.streamcloneapplier):
2002 cg.apply(repo)
2003 cg.apply(repo)
2003 else:
2004 else:
2004 cg.apply(repo, tr, 'clonebundles', url)
2005 cg.apply(repo, tr, 'clonebundles', url)
2005 return True
2006 return True
2006 except urlerr.httperror as e:
2007 except urlerr.httperror as e:
2007 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2008 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2008 except urlerr.urlerror as e:
2009 except urlerr.urlerror as e:
2009 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2010 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2010
2011
2011 return False
2012 return False
@@ -1,1379 +1,1380 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import re
11 import re
12 import types
12 import types
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 color,
16 color,
17 config,
17 config,
18 encoding,
18 encoding,
19 error,
19 error,
20 minirst,
20 minirst,
21 parser,
21 parser,
22 pycompat,
22 pycompat,
23 registrar,
23 registrar,
24 revset as revsetmod,
24 revset as revsetmod,
25 revsetlang,
25 revsetlang,
26 templatefilters,
26 templatefilters,
27 templatekw,
27 templatekw,
28 util,
28 util,
29 )
29 )
30
30
31 # template parsing
31 # template parsing
32
32
33 elements = {
33 elements = {
34 # token-type: binding-strength, primary, prefix, infix, suffix
34 # token-type: binding-strength, primary, prefix, infix, suffix
35 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
35 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
36 "%": (16, None, None, ("%", 16), None),
36 "%": (16, None, None, ("%", 16), None),
37 "|": (15, None, None, ("|", 15), None),
37 "|": (15, None, None, ("|", 15), None),
38 "*": (5, None, None, ("*", 5), None),
38 "*": (5, None, None, ("*", 5), None),
39 "/": (5, None, None, ("/", 5), None),
39 "/": (5, None, None, ("/", 5), None),
40 "+": (4, None, None, ("+", 4), None),
40 "+": (4, None, None, ("+", 4), None),
41 "-": (4, None, ("negate", 19), ("-", 4), None),
41 "-": (4, None, ("negate", 19), ("-", 4), None),
42 "=": (3, None, None, ("keyvalue", 3), None),
42 "=": (3, None, None, ("keyvalue", 3), None),
43 ",": (2, None, None, ("list", 2), None),
43 ",": (2, None, None, ("list", 2), None),
44 ")": (0, None, None, None, None),
44 ")": (0, None, None, None, None),
45 "integer": (0, "integer", None, None, None),
45 "integer": (0, "integer", None, None, None),
46 "symbol": (0, "symbol", None, None, None),
46 "symbol": (0, "symbol", None, None, None),
47 "string": (0, "string", None, None, None),
47 "string": (0, "string", None, None, None),
48 "template": (0, "template", None, None, None),
48 "template": (0, "template", None, None, None),
49 "end": (0, None, None, None, None),
49 "end": (0, None, None, None, None),
50 }
50 }
51
51
52 def tokenize(program, start, end, term=None):
52 def tokenize(program, start, end, term=None):
53 """Parse a template expression into a stream of tokens, which must end
53 """Parse a template expression into a stream of tokens, which must end
54 with term if specified"""
54 with term if specified"""
55 pos = start
55 pos = start
56 program = pycompat.bytestr(program)
56 program = pycompat.bytestr(program)
57 while pos < end:
57 while pos < end:
58 c = program[pos]
58 c = program[pos]
59 if c.isspace(): # skip inter-token whitespace
59 if c.isspace(): # skip inter-token whitespace
60 pass
60 pass
61 elif c in "(=,)%|+-*/": # handle simple operators
61 elif c in "(=,)%|+-*/": # handle simple operators
62 yield (c, None, pos)
62 yield (c, None, pos)
63 elif c in '"\'': # handle quoted templates
63 elif c in '"\'': # handle quoted templates
64 s = pos + 1
64 s = pos + 1
65 data, pos = _parsetemplate(program, s, end, c)
65 data, pos = _parsetemplate(program, s, end, c)
66 yield ('template', data, s)
66 yield ('template', data, s)
67 pos -= 1
67 pos -= 1
68 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
68 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
69 # handle quoted strings
69 # handle quoted strings
70 c = program[pos + 1]
70 c = program[pos + 1]
71 s = pos = pos + 2
71 s = pos = pos + 2
72 while pos < end: # find closing quote
72 while pos < end: # find closing quote
73 d = program[pos]
73 d = program[pos]
74 if d == '\\': # skip over escaped characters
74 if d == '\\': # skip over escaped characters
75 pos += 2
75 pos += 2
76 continue
76 continue
77 if d == c:
77 if d == c:
78 yield ('string', program[s:pos], s)
78 yield ('string', program[s:pos], s)
79 break
79 break
80 pos += 1
80 pos += 1
81 else:
81 else:
82 raise error.ParseError(_("unterminated string"), s)
82 raise error.ParseError(_("unterminated string"), s)
83 elif c.isdigit():
83 elif c.isdigit():
84 s = pos
84 s = pos
85 while pos < end:
85 while pos < end:
86 d = program[pos]
86 d = program[pos]
87 if not d.isdigit():
87 if not d.isdigit():
88 break
88 break
89 pos += 1
89 pos += 1
90 yield ('integer', program[s:pos], s)
90 yield ('integer', program[s:pos], s)
91 pos -= 1
91 pos -= 1
92 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
92 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
93 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
93 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
94 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
94 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
95 # where some of nested templates were preprocessed as strings and
95 # where some of nested templates were preprocessed as strings and
96 # then compiled. therefore, \"...\" was allowed. (issue4733)
96 # then compiled. therefore, \"...\" was allowed. (issue4733)
97 #
97 #
98 # processing flow of _evalifliteral() at 5ab28a2e9962:
98 # processing flow of _evalifliteral() at 5ab28a2e9962:
99 # outer template string -> stringify() -> compiletemplate()
99 # outer template string -> stringify() -> compiletemplate()
100 # ------------------------ ------------ ------------------
100 # ------------------------ ------------ ------------------
101 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
101 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
102 # ~~~~~~~~
102 # ~~~~~~~~
103 # escaped quoted string
103 # escaped quoted string
104 if c == 'r':
104 if c == 'r':
105 pos += 1
105 pos += 1
106 token = 'string'
106 token = 'string'
107 else:
107 else:
108 token = 'template'
108 token = 'template'
109 quote = program[pos:pos + 2]
109 quote = program[pos:pos + 2]
110 s = pos = pos + 2
110 s = pos = pos + 2
111 while pos < end: # find closing escaped quote
111 while pos < end: # find closing escaped quote
112 if program.startswith('\\\\\\', pos, end):
112 if program.startswith('\\\\\\', pos, end):
113 pos += 4 # skip over double escaped characters
113 pos += 4 # skip over double escaped characters
114 continue
114 continue
115 if program.startswith(quote, pos, end):
115 if program.startswith(quote, pos, end):
116 # interpret as if it were a part of an outer string
116 # interpret as if it were a part of an outer string
117 data = parser.unescapestr(program[s:pos])
117 data = parser.unescapestr(program[s:pos])
118 if token == 'template':
118 if token == 'template':
119 data = _parsetemplate(data, 0, len(data))[0]
119 data = _parsetemplate(data, 0, len(data))[0]
120 yield (token, data, s)
120 yield (token, data, s)
121 pos += 1
121 pos += 1
122 break
122 break
123 pos += 1
123 pos += 1
124 else:
124 else:
125 raise error.ParseError(_("unterminated string"), s)
125 raise error.ParseError(_("unterminated string"), s)
126 elif c.isalnum() or c in '_':
126 elif c.isalnum() or c in '_':
127 s = pos
127 s = pos
128 pos += 1
128 pos += 1
129 while pos < end: # find end of symbol
129 while pos < end: # find end of symbol
130 d = program[pos]
130 d = program[pos]
131 if not (d.isalnum() or d == "_"):
131 if not (d.isalnum() or d == "_"):
132 break
132 break
133 pos += 1
133 pos += 1
134 sym = program[s:pos]
134 sym = program[s:pos]
135 yield ('symbol', sym, s)
135 yield ('symbol', sym, s)
136 pos -= 1
136 pos -= 1
137 elif c == term:
137 elif c == term:
138 yield ('end', None, pos + 1)
138 yield ('end', None, pos + 1)
139 return
139 return
140 else:
140 else:
141 raise error.ParseError(_("syntax error"), pos)
141 raise error.ParseError(_("syntax error"), pos)
142 pos += 1
142 pos += 1
143 if term:
143 if term:
144 raise error.ParseError(_("unterminated template expansion"), start)
144 raise error.ParseError(_("unterminated template expansion"), start)
145 yield ('end', None, pos)
145 yield ('end', None, pos)
146
146
147 def _parsetemplate(tmpl, start, stop, quote=''):
147 def _parsetemplate(tmpl, start, stop, quote=''):
148 r"""
148 r"""
149 >>> _parsetemplate('foo{bar}"baz', 0, 12)
149 >>> _parsetemplate('foo{bar}"baz', 0, 12)
150 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
150 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
151 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
151 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
152 ([('string', 'foo'), ('symbol', 'bar')], 9)
152 ([('string', 'foo'), ('symbol', 'bar')], 9)
153 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
153 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
154 ([('string', 'foo')], 4)
154 ([('string', 'foo')], 4)
155 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
155 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
156 ([('string', 'foo"'), ('string', 'bar')], 9)
156 ([('string', 'foo"'), ('string', 'bar')], 9)
157 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
157 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
158 ([('string', 'foo\\')], 6)
158 ([('string', 'foo\\')], 6)
159 """
159 """
160 parsed = []
160 parsed = []
161 sepchars = '{' + quote
161 sepchars = '{' + quote
162 pos = start
162 pos = start
163 p = parser.parser(elements)
163 p = parser.parser(elements)
164 while pos < stop:
164 while pos < stop:
165 n = min((tmpl.find(c, pos, stop) for c in sepchars),
165 n = min((tmpl.find(c, pos, stop) for c in sepchars),
166 key=lambda n: (n < 0, n))
166 key=lambda n: (n < 0, n))
167 if n < 0:
167 if n < 0:
168 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
168 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
169 pos = stop
169 pos = stop
170 break
170 break
171 c = tmpl[n]
171 c = tmpl[n]
172 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
172 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
173 if bs % 2 == 1:
173 if bs % 2 == 1:
174 # escaped (e.g. '\{', '\\\{', but not '\\{')
174 # escaped (e.g. '\{', '\\\{', but not '\\{')
175 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
175 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
176 pos = n + 1
176 pos = n + 1
177 continue
177 continue
178 if n > pos:
178 if n > pos:
179 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
179 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
180 if c == quote:
180 if c == quote:
181 return parsed, n + 1
181 return parsed, n + 1
182
182
183 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
183 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
184 parsed.append(parseres)
184 parsed.append(parseres)
185
185
186 if quote:
186 if quote:
187 raise error.ParseError(_("unterminated string"), start)
187 raise error.ParseError(_("unterminated string"), start)
188 return parsed, pos
188 return parsed, pos
189
189
190 def _unnesttemplatelist(tree):
190 def _unnesttemplatelist(tree):
191 """Expand list of templates to node tuple
191 """Expand list of templates to node tuple
192
192
193 >>> def f(tree):
193 >>> def f(tree):
194 ... print prettyformat(_unnesttemplatelist(tree))
194 ... print prettyformat(_unnesttemplatelist(tree))
195 >>> f(('template', []))
195 >>> f(('template', []))
196 ('string', '')
196 ('string', '')
197 >>> f(('template', [('string', 'foo')]))
197 >>> f(('template', [('string', 'foo')]))
198 ('string', 'foo')
198 ('string', 'foo')
199 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
199 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
200 (template
200 (template
201 ('string', 'foo')
201 ('string', 'foo')
202 ('symbol', 'rev'))
202 ('symbol', 'rev'))
203 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
203 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
204 (template
204 (template
205 ('symbol', 'rev'))
205 ('symbol', 'rev'))
206 >>> f(('template', [('template', [('string', 'foo')])]))
206 >>> f(('template', [('template', [('string', 'foo')])]))
207 ('string', 'foo')
207 ('string', 'foo')
208 """
208 """
209 if not isinstance(tree, tuple):
209 if not isinstance(tree, tuple):
210 return tree
210 return tree
211 op = tree[0]
211 op = tree[0]
212 if op != 'template':
212 if op != 'template':
213 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
213 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
214
214
215 assert len(tree) == 2
215 assert len(tree) == 2
216 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
216 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
217 if not xs:
217 if not xs:
218 return ('string', '') # empty template ""
218 return ('string', '') # empty template ""
219 elif len(xs) == 1 and xs[0][0] == 'string':
219 elif len(xs) == 1 and xs[0][0] == 'string':
220 return xs[0] # fast path for string with no template fragment "x"
220 return xs[0] # fast path for string with no template fragment "x"
221 else:
221 else:
222 return (op,) + xs
222 return (op,) + xs
223
223
224 def parse(tmpl):
224 def parse(tmpl):
225 """Parse template string into tree"""
225 """Parse template string into tree"""
226 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
226 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
227 assert pos == len(tmpl), 'unquoted template should be consumed'
227 assert pos == len(tmpl), 'unquoted template should be consumed'
228 return _unnesttemplatelist(('template', parsed))
228 return _unnesttemplatelist(('template', parsed))
229
229
230 def _parseexpr(expr):
230 def _parseexpr(expr):
231 """Parse a template expression into tree
231 """Parse a template expression into tree
232
232
233 >>> _parseexpr('"foo"')
233 >>> _parseexpr('"foo"')
234 ('string', 'foo')
234 ('string', 'foo')
235 >>> _parseexpr('foo(bar)')
235 >>> _parseexpr('foo(bar)')
236 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
236 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
237 >>> _parseexpr('foo(')
237 >>> _parseexpr('foo(')
238 Traceback (most recent call last):
238 Traceback (most recent call last):
239 ...
239 ...
240 ParseError: ('not a prefix: end', 4)
240 ParseError: ('not a prefix: end', 4)
241 >>> _parseexpr('"foo" "bar"')
241 >>> _parseexpr('"foo" "bar"')
242 Traceback (most recent call last):
242 Traceback (most recent call last):
243 ...
243 ...
244 ParseError: ('invalid token', 7)
244 ParseError: ('invalid token', 7)
245 """
245 """
246 p = parser.parser(elements)
246 p = parser.parser(elements)
247 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
247 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
248 if pos != len(expr):
248 if pos != len(expr):
249 raise error.ParseError(_('invalid token'), pos)
249 raise error.ParseError(_('invalid token'), pos)
250 return _unnesttemplatelist(tree)
250 return _unnesttemplatelist(tree)
251
251
252 def prettyformat(tree):
252 def prettyformat(tree):
253 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
253 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
254
254
255 def compileexp(exp, context, curmethods):
255 def compileexp(exp, context, curmethods):
256 """Compile parsed template tree to (func, data) pair"""
256 """Compile parsed template tree to (func, data) pair"""
257 t = exp[0]
257 t = exp[0]
258 if t in curmethods:
258 if t in curmethods:
259 return curmethods[t](exp, context)
259 return curmethods[t](exp, context)
260 raise error.ParseError(_("unknown method '%s'") % t)
260 raise error.ParseError(_("unknown method '%s'") % t)
261
261
262 # template evaluation
262 # template evaluation
263
263
264 def getsymbol(exp):
264 def getsymbol(exp):
265 if exp[0] == 'symbol':
265 if exp[0] == 'symbol':
266 return exp[1]
266 return exp[1]
267 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
267 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
268
268
269 def getlist(x):
269 def getlist(x):
270 if not x:
270 if not x:
271 return []
271 return []
272 if x[0] == 'list':
272 if x[0] == 'list':
273 return getlist(x[1]) + [x[2]]
273 return getlist(x[1]) + [x[2]]
274 return [x]
274 return [x]
275
275
276 def gettemplate(exp, context):
276 def gettemplate(exp, context):
277 """Compile given template tree or load named template from map file;
277 """Compile given template tree or load named template from map file;
278 returns (func, data) pair"""
278 returns (func, data) pair"""
279 if exp[0] in ('template', 'string'):
279 if exp[0] in ('template', 'string'):
280 return compileexp(exp, context, methods)
280 return compileexp(exp, context, methods)
281 if exp[0] == 'symbol':
281 if exp[0] == 'symbol':
282 # unlike runsymbol(), here 'symbol' is always taken as template name
282 # unlike runsymbol(), here 'symbol' is always taken as template name
283 # even if it exists in mapping. this allows us to override mapping
283 # even if it exists in mapping. this allows us to override mapping
284 # by web templates, e.g. 'changelogtag' is redefined in map file.
284 # by web templates, e.g. 'changelogtag' is redefined in map file.
285 return context._load(exp[1])
285 return context._load(exp[1])
286 raise error.ParseError(_("expected template specifier"))
286 raise error.ParseError(_("expected template specifier"))
287
287
288 def findsymbolicname(arg):
288 def findsymbolicname(arg):
289 """Find symbolic name for the given compiled expression; returns None
289 """Find symbolic name for the given compiled expression; returns None
290 if nothing found reliably"""
290 if nothing found reliably"""
291 while True:
291 while True:
292 func, data = arg
292 func, data = arg
293 if func is runsymbol:
293 if func is runsymbol:
294 return data
294 return data
295 elif func is runfilter:
295 elif func is runfilter:
296 arg = data[0]
296 arg = data[0]
297 else:
297 else:
298 return None
298 return None
299
299
300 def evalfuncarg(context, mapping, arg):
300 def evalfuncarg(context, mapping, arg):
301 func, data = arg
301 func, data = arg
302 # func() may return string, generator of strings or arbitrary object such
302 # func() may return string, generator of strings or arbitrary object such
303 # as date tuple, but filter does not want generator.
303 # as date tuple, but filter does not want generator.
304 thing = func(context, mapping, data)
304 thing = func(context, mapping, data)
305 if isinstance(thing, types.GeneratorType):
305 if isinstance(thing, types.GeneratorType):
306 thing = stringify(thing)
306 thing = stringify(thing)
307 return thing
307 return thing
308
308
def evalboolean(context, mapping, arg):
    """Evaluate given argument as boolean, but also takes boolean literals"""
    func, data = arg
    if func is not runsymbol:
        thing = func(context, mapping, data)
    else:
        thing = func(context, mapping, data, default=None)
        if thing is None:
            # not a template keyword: interpret the symbol text itself as a
            # boolean literal such as "true"/"false"
            thing = util.parsebool(data)
    if isinstance(thing, bool):
        return thing
    # other objects are evaluated as strings, which means 0 is True, but
    # empty dict/list should be False as they are expected to be ''
    return bool(stringify(thing))
324
324
def evalinteger(context, mapping, arg, err):
    """Evaluate arg and coerce the result to int; raise ParseError(err)
    when the value is not convertible."""
    value = evalfuncarg(context, mapping, arg)
    try:
        return int(value)
    except (TypeError, ValueError):
        raise error.ParseError(err)
331
331
def evalstring(context, mapping, arg):
    """Evaluate a compiled argument and flatten the result to a string."""
    func, data = arg
    result = func(context, mapping, data)
    return stringify(result)
335
335
def evalstringliteral(context, mapping, arg):
    """Evaluate given argument as string template, but returns symbol name
    if it is unknown"""
    func, data = arg
    if func is runsymbol:
        # an unknown symbol falls back to its own name (default=data)
        result = func(context, mapping, data, default=data)
    else:
        result = func(context, mapping, data)
    return stringify(result)
345
345
def runinteger(context, mapping, data):
    """Compiled form of an integer literal."""
    return int(data)
348
348
def runstring(context, mapping, data):
    """Compiled form of a string literal: return it unchanged."""
    return data
351
351
def _recursivesymbolblocker(key):
    """Return a callable that aborts with a recursion error.

    It is planted into a mapping so that a template expanding itself is
    detected instead of recursing forever (see runsymbol/issue4758).
    """
    def showrecursion(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return showrecursion
356
356
def _runrecursivesymbol(context, mapping, key):
    # evaluating a poisoned symbol: report the recursive reference and abort
    raise error.Abort(_("recursive reference '%s' in template") % key)
359
359
def runsymbol(context, mapping, key, default=''):
    """Resolve a template symbol: first the local mapping, then the
    context defaults, then as a template of the same name; callable
    values are invoked with the mapping as keyword arguments."""
    value = mapping.get(key)
    if value is None:
        value = context._defaults.get(key)
    if value is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            value = context.process(key, safemapping)
        except TemplateNotFound:
            value = default
    if callable(value):
        return value(**mapping)
    return value
376
376
def buildtemplate(exp, context):
    # compile each sub-expression; runtemplate concatenates them lazily
    compiled = [compileexp(child, context, methods) for child in exp[1:]]
    return (runtemplate, compiled)
380
380
def runtemplate(context, mapping, template):
    """Yield the evaluation of each compiled (func, data) piece in order."""
    for func, data in template:
        yield func(context, mapping, data)
384
384
def buildfilter(exp, context):
    """Compile an '<expr>|name' node into a filter or function call."""
    n = getsymbol(exp[2])
    if n in context._filters:
        # plain filter: one implicit argument
        arg = compileexp(exp[1], context, methods)
        return (runfilter, (arg, context._filters[n]))
    if n in funcs:
        # template function used in filter position
        f = funcs[n]
        args = _buildfuncargs(exp[1], context, methods, n, f._argspec)
        return (f, args)
    raise error.ParseError(_("unknown function '%s'") % n)
396
396
def runfilter(context, mapping, data):
    """Apply a filter function to an evaluated argument.

    On a type mismatch, report which keyword/filter combination failed
    instead of letting the raw exception escape.
    """
    arg, filt = data
    thing = evalfuncarg(context, mapping, arg)
    try:
        return filt(thing)
    except (ValueError, AttributeError, TypeError):
        sym = findsymbolicname(arg)
        # use __name__ rather than the Python-2-only func_name alias so
        # this keeps working on Python 3
        if sym:
            msg = (_("template filter '%s' is not compatible with keyword '%s'")
                   % (filt.__name__, sym))
        else:
            msg = _("incompatible use of template filter '%s'") % filt.__name__
        raise error.Abort(msg)
410
410
def buildmap(exp, context):
    # '<expr> % <template>': compile the data expression and the template,
    # then let runmap iterate at evaluation time
    dfunc, ddata = compileexp(exp[1], context, methods)
    tfunc, tdata = gettemplate(exp[2], context)
    return (runmap, (dfunc, ddata, tfunc, tdata))
415
415
def runmap(context, mapping, data):
    """Expand a template once per item of an iterable value."""
    func, data, tfunc, tdata = data
    target = func(context, mapping, data)
    if util.safehasattr(target, 'itermaps'):
        items = target.itermaps()
    else:
        try:
            items = iter(target)
        except TypeError:
            if func is runsymbol:
                raise error.ParseError(_("keyword '%s' is not iterable") % data)
            else:
                raise error.ParseError(_("%r is not iterable") % target)

    for idx, item in enumerate(items):
        localmapping = mapping.copy()
        localmapping['index'] = idx
        if isinstance(item, dict):
            localmapping.update(item)
            localmapping['originalnode'] = mapping.get('node')
            yield tfunc(context, localmapping, tdata)
        else:
            # item is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield item
442
442
def buildnegate(exp, context):
    # unary minus: compile the operand, negate at evaluation time
    return (runnegate, compileexp(exp[1], context, exprmethods))
446
446
def runnegate(context, mapping, data):
    """Evaluate the operand as an integer and return its negation."""
    value = evalinteger(context, mapping, data,
                        _('negation needs an integer argument'))
    return -value
451
451
def buildarithmetic(exp, context, func):
    # binary arithmetic: compile both operands; func combines them later
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (runarithmetic, (func, lhs, rhs))
456
456
def runarithmetic(context, mapping, data):
    """Evaluate both operands as integers and apply the operator."""
    func, left, right = data
    lhs = evalinteger(context, mapping, left,
                      _('arithmetic only defined on integers'))
    rhs = evalinteger(context, mapping, right,
                      _('arithmetic only defined on integers'))
    try:
        return func(lhs, rhs)
    except ZeroDivisionError:
        raise error.Abort(_('division by zero is not defined'))
467
467
def buildfunc(exp, context):
    """Compile a 'name(args)' call into a (func, args) pair."""
    n = getsymbol(exp[1])
    if n in funcs:
        f = funcs[n]
        compiled = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec)
        return (f, compiled)
    if n in context._filters:
        # filter used with call syntax: must take exactly one argument
        compiled = _buildfuncargs(exp[2], context, exprmethods, n,
                                  argspec=None)
        if len(compiled) != 1:
            raise error.ParseError(_("filter %s expects one argument") % n)
        return (runfilter, (compiled[0], context._filters[n]))
    raise error.ParseError(_("unknown function '%s'") % n)
481
481
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> fargs('a(l=1, k=2)', 'k l m').keys()
    ['l', 'k']
    >>> args = fargs('a(opts=1, k=2)', '**opts')
    >>> args.keys(), args['opts'].keys()
    (['opts'], ['opts', 'k'])
    """
    def _complist(xs):
        # compile a list of positional argument trees
        return [compileexp(x, context, curmethods) for x in xs]
    def _compdict(xs):
        # compile a mapping of named argument trees, preserving order
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return _complist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        compargs[varkey] = _complist(treeargs.pop(varkey))
    if optkey:
        compargs[optkey] = _compdict(treeargs.pop(optkey))
    compargs.update(_compdict(treeargs))
    return compargs
518
518
def buildkeyvaluepair(exp, content):
    # 'key=value' is only legal inside function argument lists
    raise error.ParseError(_("can't use a key-value pair in this context"))
521
521
# dict of template built-in functions, filled in by the decorator below
funcs = {}

# registration decorator: each @templatefunc(...) below adds its function
# to the funcs table under the declared name
templatefunc = registrar.templatefunc(funcs)
526
526
@templatefunc('date(date[, fmt])')
def date(context, mapping, args):
    """Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    when = evalfuncarg(context, mapping, args[0])
    fmt = evalstring(context, mapping, args[1]) if len(args) == 2 else None
    try:
        if fmt is None:
            return util.datestr(when)
        return util.datestr(when, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
548
548
@templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
def dict_(context, mapping, args):
    """Construct a dict from key-value pairs. A key may be omitted if
    a value expression can provide an unambiguous name."""
    data = util.sortdict()

    # positional values: infer each key from the expression's symbol name
    for v in args['args']:
        k = findsymbolicname(v)
        if not k:
            raise error.ParseError(_('dict key cannot be inferred'))
        if k in data or k in args['kwargs']:
            raise error.ParseError(_("duplicated dict key '%s' inferred") % k)
        data[k] = evalfuncarg(context, mapping, v)

    # explicit key=value pairs
    data.update((k, evalfuncarg(context, mapping, v))
                for k, v in args['kwargs'].iteritems())
    return templatekw.hybriddict(data)
566
566
@templatefunc('diff([includepattern [, excludepattern]])')
def diff(context, mapping, args):
    """Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # [pattern] for a non-empty argument at index i, otherwise []
        if i < len(args):
            s = evalstring(context, mapping, args[i]).strip()
            if s:
                return [s]
        return []

    ctx = mapping['ctx']
    matcher = ctx.match([], getpatterns(0), getpatterns(1))
    return ''.join(ctx.diff(match=matcher))
586
586
@templatefunc('files(pattern)')
def files(context, mapping, args):
    """All files of the current changeset matching the pattern. See
    :hg:`help patterns`."""
    if not len(args) == 1:
        # i18n: "files" is a keyword
        raise error.ParseError(_("files expects one argument"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    matcher = ctx.match([raw])
    matched = list(ctx.matches(matcher))
    return templatekw.showlist("file", matched, mapping)
600
600
@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
def fill(context, mapping, args):
    """Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    width = 76
    initindent = ''
    hangindent = ''
    # each optional argument only applies when actually supplied
    if len(args) >= 2:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
        if len(args) >= 3:
            initindent = evalstring(context, mapping, args[2])
        if len(args) >= 4:
            hangindent = evalstring(context, mapping, args[3])

    return templatefilters.fill(text, width, initindent, hangindent)
624
624
@templatefunc('formatnode(node)')
def formatnode(context, mapping, args):
    """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
    if len(args) != 1:
        # i18n: "formatnode" is a keyword
        raise error.ParseError(_("formatnode expects one argument"))

    ui = mapping['ui']
    node = evalstring(context, mapping, args[0])
    # debug output wants the full hash; otherwise abbreviate it
    if ui.debugflag:
        return node
    return templatefilters.short(node)
637
637
@templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])',
              argspec='text width fillchar left')
def pad(context, mapping, args):
    """Pad text with a
    fill character."""
    if 'text' not in args or 'width' not in args:
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args['width'],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))

    text = evalstring(context, mapping, args['text'])

    left = False
    fillchar = ' '
    if 'fillchar' in args:
        fillchar = evalstring(context, mapping, args['fillchar'])
        # the fill character must be a single column once color effect
        # codes are stripped
        if len(color.stripeffects(fillchar)) != 1:
            # i18n: "pad" is a keyword
            raise error.ParseError(_("pad() expects a single fill character"))
    if 'left' in args:
        left = evalboolean(context, mapping, args['left'])

    # measure display width without color codes
    fillwidth = width - encoding.colwidth(color.stripeffects(text))
    if fillwidth <= 0:
        return text
    padding = fillchar * fillwidth
    if left:
        return padding + text
    return text + padding
670
670
@templatefunc('indent(text, indentchars[, firstline])')
def indent(context, mapping, args):
    """Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indent = evalstring(context, mapping, args[1])

    if len(args) == 3:
        firstline = evalstring(context, mapping, args[2])
    else:
        firstline = indent

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indent)
691
691
@templatefunc('get(dict, key)')
def get(context, mapping, args):
    """Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    dictarg = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(dictarg, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    key = evalfuncarg(context, mapping, args[1])
    return dictarg.get(key)
708
708
@templatefunc('if(expr, then[, else])')
def if_(context, mapping, args):
    """Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    if evalboolean(context, mapping, args[0]):
        yield args[1][0](context, mapping, args[1][1])
    elif len(args) == 3:
        # optional else branch
        yield args[2][0](context, mapping, args[2][1])
722
722
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
    """Conditionally execute based
    on whether the item "needle" is in "haystack"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    needle = evalstring(context, mapping, args[0])
    haystack = evalfuncarg(context, mapping, args[1])

    if needle in haystack:
        yield args[2][0](context, mapping, args[2][1])
    elif len(args) == 4:
        # optional else branch
        yield args[3][0](context, mapping, args[3][1])
738
738
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
    """Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    # both operands are compared as strings
    lhs = evalstring(context, mapping, args[0])
    rhs = evalstring(context, mapping, args[1])
    if lhs == rhs:
        yield args[2][0](context, mapping, args[2][1])
    elif len(args) == 4:
        yield args[3][0](context, mapping, args[3][1])
753
753
@templatefunc('join(list, sep)')
def join(context, mapping, args):
    """Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    joinset = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(joinset, 'itermaps'):
        # mappable collections render each member via their join format
        jf = joinset.joinfmt
        joinset = [jf(x) for x in joinset.itermaps()]

    joiner = " "
    if len(args) > 1:
        joiner = evalstring(context, mapping, args[1])

    # emit the separator between items, but not before the first one
    first = True
    for item in joinset:
        if first:
            first = False
        else:
            yield joiner
        yield item
777
777
@templatefunc('label(label, expr)')
def label(context, mapping, args):
    """Apply a label to generated content. Content with
    a label applied can result in additional post-processing, such as
    automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    ui = mapping['ui']
    thing = evalstring(context, mapping, args[1])
    # preserve unknown symbol as literal so effects like 'red', 'bold',
    # etc. don't need to be quoted
    label = evalstringliteral(context, mapping, args[0])

    return ui.label(thing, label)
794
794
@templatefunc('latesttag([pattern])')
def latesttag(context, mapping, args):
    """The global tags matching the given pattern on the
    most recent globally tagged ancestor of this changeset.
    If no such tags exist, the "{tag}" template resolves to
    the string "null"."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    # with no argument, all global tags on the latest tagged ancestor match
    pattern = evalstring(context, mapping, args[0]) if args else None
    return templatekw.showlatesttags(pattern, **mapping)
810
810
@templatefunc('localdate(date[, tz])')
def localdate(context, mapping, args):
    """Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    try:
        # normalize to a (unixtime, offset) tuple; accepts strings too
        date = util.parsedate(date)
    except AttributeError: # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            # try parsing a timezone string such as "+0530" first
            tzoffset, remainder = util.parsetimezone(tz)
            # a leftover remainder means the string was not purely a timezone
            if remainder:
                tzoffset = None
        if tzoffset is None:
            # fall back to interpreting tz as an integer offset in seconds
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no tz argument: use the local timezone's current offset
        tzoffset = util.makedate()[1]
    return (date[0], tzoffset)
841
841
@templatefunc('mod(a, b)')
def mod(context, mapping, args):
    """Calculate a mod b such that a / b + a mod b == a"""
    if not len(args) == 2:
        # i18n: "mod" is a keyword
        raise error.ParseError(_("mod expects two arguments"))

    # operand evaluation and integer coercion are handled by runarithmetic
    return runarithmetic(context, mapping,
                         (lambda left, right: left % right, args[0], args[1]))
851
851
@templatefunc('relpath(path)')
def relpath(context, mapping, args):
    """Convert a repository-absolute path into a filesystem path relative to
    the current working directory."""
    if len(args) != 1:
        # i18n: "relpath" is a keyword
        raise error.ParseError(_("relpath expects one argument"))

    # the context knows its repo, which relativizes paths against the cwd
    repo = mapping['ctx'].repo()
    repopath = evalstring(context, mapping, args[0])
    return repo.pathto(repopath)
863
863
@templatefunc('revset(query[, formatargs...])')
def revset(context, mapping, args):
    """Execute a revision set query. See
    :hg:`help revset`."""
    if not len(args) > 0:
        # i18n: "revset" is a keyword
        raise error.ParseError(_("revset expects one or more arguments"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    repo = ctx.repo()

    def query(expr):
        # compile and run the revset expression against the repo
        m = revsetmod.match(repo.ui, expr)
        return m(repo)

    if len(args) > 1:
        # format arguments are interpolated into the query string, so the
        # result depends on them and is not cached
        formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
        revs = query(revsetlang.formatspec(raw, *formatargs))
        revs = list(revs)
    else:
        # plain queries are cached in the per-run template cache; the same
        # query may otherwise be re-evaluated once per displayed changeset
        revsetcache = mapping['cache'].setdefault("revsetcache", {})
        if raw in revsetcache:
            revs = revsetcache[raw]
        else:
            revs = query(raw)
            revs = list(revs)
            revsetcache[raw] = revs

    return templatekw.showrevslist("revision", revs, **mapping)
894
894
@templatefunc('rstdoc(text, style)')
def rstdoc(context, mapping, args):
    """Format reStructuredText."""
    if len(args) != 2:
        # i18n: "rstdoc" is a keyword
        raise error.ParseError(_("rstdoc expects two arguments"))

    source = evalstring(context, mapping, args[0])
    styling = evalstring(context, mapping, args[1])

    # keep=['verbose'] retains sections flagged for verbose output
    return minirst.format(source, style=styling, keep=['verbose'])
906
906
@templatefunc('separate(sep, args)', argspec='sep *args')
def separate(context, mapping, args):
    """Add a separator between non-empty arguments."""
    if 'sep' not in args:
        # i18n: "separate" is a keyword
        raise error.ParseError(_("separate expects at least one argument"))

    delim = evalstring(context, mapping, args['sep'])
    emitted = False
    for piece in args['args']:
        text = evalstring(context, mapping, piece)
        if not text:
            # empty arguments produce neither output nor a separator
            continue
        if emitted:
            yield delim
        emitted = True
        yield text
925
925
@templatefunc('shortest(node, minlength=4)')
def shortest(context, mapping, args):
    """Obtain the shortest representation of
    a node."""
    if not (1 <= len(args) <= 2):
        # i18n: "shortest" is a keyword
        raise error.ParseError(_("shortest() expects one or two arguments"))

    node = evalstring(context, mapping, args[0])

    minlength = 4
    if len(args) > 1:
        minlength = evalinteger(context, mapping, args[1],
                                # i18n: "shortest" is a keyword
                                _("shortest() expects an integer minlength"))

    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = mapping['ctx']._repo.unfiltered().changelog
    def isvalid(test):
        # a prefix is valid when it unambiguously names the node AND cannot
        # be confused with a plain revision number
        try:
            if cl._partialmatch(test) is None:
                return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                return True
        except error.RevlogError:
            return False
        except error.WdirUnsupported:
            # single 'ff...' match
            return True

    shortest = node
    startlength = max(6, minlength)
    length = startlength
    # shrink from startlength while prefixes stay valid; if the starting
    # length is already ambiguous, grow instead until a valid prefix appears
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest
981
981
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
    """Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if not (1 <= len(args) <= 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = evalstring(context, mapping, args[0])
    if len(args) == 1:
        # no character set given: strip whitespace
        return text.strip()
    return text.strip(evalstring(context, mapping, args[1]))
995
995
@templatefunc('sub(pattern, replacement, expression)')
def sub(context, mapping, args):
    """Perform text substitution
    using regular expressions."""
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pat = evalstring(context, mapping, args[0])
    rpl = evalstring(context, mapping, args[1])
    src = evalstring(context, mapping, args[2])
    # compile separately from substituting so a bad pattern and a bad
    # replacement each produce their own error message
    try:
        compiled = re.compile(pat)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
    try:
        yield compiled.sub(rpl, src)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
1017
1017
@templatefunc('startswith(pattern, text)')
def startswith(context, mapping, args):
    """Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    prefix = evalstring(context, mapping, args[0])
    text = evalstring(context, mapping, args[1])
    # empty string when the prefix does not match
    return text if text.startswith(prefix) else ''
1031
1031
@templatefunc('word(number, text[, separator])')
def word(context, mapping, args):
    """Return the nth word from a string."""
    if not (2 <= len(args) <= 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    num = evalinteger(context, mapping, args[0],
                      # i18n: "word" is a keyword
                      _("word expects an integer index"))
    text = evalstring(context, mapping, args[1])
    # default separator None means "split on runs of whitespace"
    splitter = evalstring(context, mapping, args[2]) if len(args) == 3 else None

    tokens = text.split(splitter)
    # indexes out of range in either direction resolve to an empty string
    if -len(tokens) <= num < len(tokens):
        return tokens[num]
    return ''
1054
1054
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
exprmethods = {
    # leaf nodes: compile to (runner, data) pairs
    "integer": lambda e, c: (runinteger, e[1]),
    "string": lambda e, c: (runstring, e[1]),
    "symbol": lambda e, c: (runsymbol, e[1]),
    "template": buildtemplate,
    # a parenthesized group compiles to its inner expression
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
    # ".": buildmember,
    # operators
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    "keyvalue": buildkeyvaluepair,
    # arithmetic; note division is floor division
    "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    "negate": buildnegate,
    "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
    }
1073
1073
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
methods = exprmethods.copy()
# at the top level, a bare integer is a keyword/variable lookup, not a literal
methods["integer"] = exprmethods["symbol"] # '{1}' as variable
1077
1077
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == 'func' and tree[1][0] == 'symbol':
            # func(...) form: name from the symbol node, args from the list
            return tree[1][1], getlist(tree[2])
        if op == '|' and tree[2][0] == 'symbol':
            # expr|filter form: the filter name with a single argument
            return tree[2][1], [tree[1]]
1091
1091
def expandaliases(tree, aliases):
    """Return a new tree with the given aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
1096
1096
# template engine

# convenience alias: collapse nested template output into a single byte string
stringify = templatefilters.stringify
1100
1100
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    thing = templatekw.unwraphybrid(thing)
    if isinstance(thing, bytes):
        yield thing
    elif thing is None:
        # None contributes nothing to the output stream
        pass
    elif not util.safehasattr(thing, '__iter__'):
        # scalar (int, etc.): stringify it
        yield pycompat.bytestr(thing)
    else:
        # each element may itself be a hybrid or a nested iterable, so
        # recurse; the recursive call performs the same unwrap/branching
        for item in thing:
            for piece in _flatten(item):
                yield piece
1122
1122
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when the string is long enough and wrapped in one matching
    # pair of single or double quotes
    if len(s) >= 2 and s[0] in "'\"" and s[0] == s[-1]:
        return s[1:-1]
    return s
1128
1128
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, aliases=()):
        # loader: callable mapping a template name to its raw text
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        if defaults is None:
            defaults = {}
        self._defaults = defaults
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {} # key: (func, data)

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                x = parse(self._loader(t))
                if self._aliasmap:
                    # rewrite the parse tree through template aliases first
                    x = _aliasrules.expand(self._aliasmap, x)
                self._cache[t] = compileexp(x, self, methods)
            except: # re-raises
                # drop the poison entry so a later load may retry cleanly
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
1181
1181
# registry of template engines by name; 'default' is the built-in engine
engines = {'default': engine}
1183
1183
def stylelist():
    # list the available built-in styles for the "style not found" hint
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    styles = []
    for entry in os.listdir(paths[0]):
        parts = entry.split(".")
        if parts[-1] in ('orig', 'rej'):
            # skip editor backups and patch rejects
            continue
        if parts[0] == "map-cmdline":
            # style files are named map-cmdline.<style>
            styles.append(parts[1])
    return ", ".join(sorted(styles))
1197
1197
def _readmapfile(mapfile):
    """Load template elements from the given map file"""
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile)

    # cache: key -> inline (quoted) template string
    # tmap: key -> (engine name, template file path)
    cache = {}
    tmap = {}
    for key, val in conf[''].items():
        if not val:
            raise error.ParseError(_('missing value'), conf.source('', key))
        if val[0] in "'\"":
            # quoted value: an inline template string
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('', key))
            cache[key] = unquotestring(val)
        elif key == "__base__":
            # treat as a pointer to a base class for this style
            path = util.normpath(os.path.join(base, val))

            # fallback check in template paths
            if not os.path.exists(path):
                for p in templatepaths():
                    p2 = util.normpath(os.path.join(p, val))
                    if os.path.isfile(p2):
                        path = p2
                        break
                    # a directory entry may hold its templates in a 'map' file
                    p3 = util.normpath(os.path.join(p2, "map"))
                    if os.path.isfile(p3):
                        path = p3
                        break

            # recursively read the base style and merge in every entry this
            # style does not override
            bcache, btmap = _readmapfile(path)
            for k in bcache:
                if k not in cache:
                    cache[k] = bcache[k]
            for k in btmap:
                if k not in tmap:
                    tmap[k] = btmap[k]
        else:
            # unquoted value: "[engine:]path" naming a template file
            val = 'default', val
            if ':' in val[1]:
                val = val[1].split(':', 1)
            tmap[key] = val[0], os.path.join(base, val[1])
    return cache, tmap
1247
1247
class TemplateNotFound(error.Abort):
    """Raised when a template name cannot be resolved via the map file."""
    pass
1250
1250
class templater(object):
    """Template expansion engine.

    Keeps a cache of raw template text, a name -> (engine type, path)
    map, the filter table, and one lazily-created engine instance per
    engine type.
    """

    def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.
        aliases is list of alias (name, replacement) pairs.
        '''
        filters = {} if filters is None else filters
        defaults = {} if defaults is None else defaults
        cache = {} if cache is None else cache
        self.cache = cache.copy()
        self.map = {}
        # stock filter table first, then caller-supplied overrides on top
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self._aliases = aliases
        self.minchunk = minchunk
        self.maxchunk = maxchunk
        self.ecache = {}  # engine type name -> engine instance

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
                    minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        obj = cls(filters, defaults, cache, [], minchunk, maxchunk)
        extracache, tmap = _readmapfile(mapfile)
        obj.cache.update(extracache)
        obj.map = tmap
        return obj

    def __contains__(self, key):
        # known either from the preloaded cache or from the map file
        if key in self.cache:
            return True
        return key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t in self.cache:
            return self.cache[t]
        try:
            self.cache[t] = util.readfile(self.map[t][1])
        except KeyError as err:
            raise TemplateNotFound(_('"%s" not in template map') %
                                   err.args[0])
        except IOError as err:
            # re-raise with the offending template path in the message
            raise IOError(err.args[0], _('template file %s: %s') %
                          (self.map[t][1], err.args[1]))
        return self.cache[t]

    def render(self, mapping):
        """Render the default unnamed template and return result as string"""
        stream = self('', **mapping)
        return stringify(stream)

    def __call__(self, t, **mapping):
        # keyword argument names arrive as native str on py3; normalize
        # the mapping keys back to bytes for the rest of the machinery
        mapping = pycompat.byteskwargs(mapping)
        # fall back to 'default' when the map has no (truthy) engine entry
        enginetype = 'default'
        if t in self.map and self.map[t][0]:
            enginetype = self.map[t][0]
        if enginetype not in self.ecache:
            try:
                enginecls = engines[enginetype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') %
                                  enginetype)
            self.ecache[enginetype] = enginecls(self.load, self.filters,
                                                self.defaults, self._aliases)
        engine = self.ecache[enginetype]
        stream = engine.process(t, mapping)
        if self.minchunk:
            # coalesce tiny yields into reasonably-sized chunks
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
1321
1322
def templatepaths():
    '''return locations used for template files.'''
    candidates = [os.path.normpath(os.path.join(util.datapath, rel))
                  for rel in ['templates']]
    # keep only the directories that actually exist on disk
    return [path for path in candidates if os.path.isdir(path)]
1328
1329
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
1336
1337
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only a plain name may honor the template search path; reject
        # anything empty or containing path components / separators
        unsafe = (not style
                  or style in (os.curdir, os.pardir)
                  or pycompat.ossep in style
                  or pycompat.osaltsep and pycompat.osaltsep in style)
        if unsafe:
            continue
        candidates = [os.path.join(style, 'map'), 'map-' + style, 'map']

        for directory in paths:
            for relpath in candidates:
                mapfile = os.path.join(directory, relpath)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1371
1372
def loadfunction(ui, extname, registrarobj):
    """Register every template function declared in registrarobj's table
    into the module-level funcs table.
    """
    funcs.update(registrarobj._table)
1377
1378
# tell hggettext to extract docstrings from these functions:
i18nfunctions = funcs.values()
General Comments 0
You need to be logged in to leave comments. Login now