##// END OF EJS Templates
py3: convert kwargs' keys' to str using pycompat.strkwargs()...
Pulkit Goyal -
r33017:c31d4562 default
parent child Browse files
Show More
@@ -1,2012 +1,2012
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import hashlib
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 )
18 18 from . import (
19 19 bookmarks as bookmod,
20 20 bundle2,
21 21 changegroup,
22 22 discovery,
23 23 error,
24 24 lock as lockmod,
25 25 obsolete,
26 26 phases,
27 27 pushkey,
28 28 pycompat,
29 29 scmutil,
30 30 sslutil,
31 31 streamclone,
32 32 url as urlmod,
33 33 util,
34 34 )
35 35
# Compatibility aliases for the urllib error/request wrappers exposed by util.
urlerr = util.urlerr
urlreq = util.urlreq

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                         }

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48 48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;key=value;..." into the version and a dict of
        # URI-decoded parameters. No ";" means no parameters.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified "<compression>-<version>" form.
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate human-centric names into internal wire identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170 170
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte header of a bundle stream and return an unpacker.

    ``fname`` is used for error reporting; an empty name means an anonymous
    stream, for which a headerless changegroup is also tolerated. When ``vfs``
    is given, ``fname`` is resolved relative to it for error messages.
    """
    hdr = changegroup.readexactly(fh, 4)

    compalg = None
    if not fname:
        fname = "stream"
        # Data starting with a NUL byte (and no HG magic) is treated as a
        # headerless changegroup: uncompressed HG10 with the consumed bytes
        # restored via headerlessfixup.
        if not hdr.startswith('HG') and hdr.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, hdr)
            hdr = "HG10"
            compalg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = hdr[0:2]
    version = hdr[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        # The two-byte compression algorithm follows, unless already known.
        if compalg is None:
            compalg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compalg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198 198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Raises ``error.Abort`` when the bundle type or its compression cannot
    be mapped to a known bundlespec.
    """
    def speccompression(alg):
        # Map an internal bundle compression type to its human-facing
        # bundlespec name; None when the engine is not recognized.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # normalize the truncated-bzip2 marker to the plain BZ type
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            compalg = b.params['Compression']
            comp = speccompression(compalg)
            if not comp:
                # Report the unrecognized algorithm itself; the previous
                # code interpolated ``comp``, which is always None here.
                raise error.Abort(_('unknown compression algorithm: %s')
                                  % compalg)
        else:
            comp = 'none'

        # Derive the bundlespec version from the changegroup part(s).
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251 251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if not common:
        # Nothing known in common: everything from the null revision out.
        known = [nullid]
    else:
        # Drop common nodes the local changelog does not actually have.
        known = [node for node in common if changelog.hasnode(node)]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, known, heads)
270 270
271 271 def _forcebundle1(op):
272 272 """return true if a pull/push must use bundle1
273 273
274 274 This function is used to allow testing of the older bundle version"""
275 275 ui = op.repo.ui
276 276 forcebundle1 = False
277 277 # The goal is this config is to allow developer to choose the bundle
278 278 # version used during exchanged. This is especially handy during test.
279 279 # Value is a list of bundle version to be picked from, highest version
280 280 # should be used.
281 281 #
282 282 # developer config: devel.legacy.exchange
283 283 exchange = ui.configlist('devel', 'legacy.exchange')
284 284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 285 return forcebundle1 or not op.remote.capable('bundle2')
286 286
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    # computed lazily and cached for the lifetime of this operation
    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # cgresult is truthy when the changegroup push succeeded
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # action -> (success message, failure message); both take the bookmark name
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
411 411
412 412
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.

    Returns the pushoperation object; its ``cgresult`` attribute carries
    the integer changegroup push result:
    - None means nothing to push
    - 0 means HTTP error
    - 1 means we pushed and remote head count is unchanged *or*
      we have outgoing changesets but refused to push
    - other values as described by addchangegroup()

    ``opargs`` is an optional dict of extra keyword arguments forwarded to
    the pushoperation constructor.
    '''
    if opargs is None:
        opargs = {}
    # opargs keys may be bytes (the project-wide convention); keyword
    # argument names must be str on Python 3, so normalize them first.
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        # local-to-local push: the destination must support all of the
        # source's repository requirements.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            # a transaction lets us roll back server-pushed-back data on error
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # legacy addchangegroup path requires locking the remote
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if not _forcebundle1(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
            if pushop.trmanager:
                pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
497 497
# list of steps to perform discovery before push
# (step names in execution order; populated by the @pushdiscovery decorator)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
505 505
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def register(func):
        # refuse to silently clobber an already-registered step
        assert stepname not in pushdiscoverymapping
        pushdiscoveryorder.append(stepname)
        pushdiscoverymapping[stepname] = func
        return func
    return register
521 521
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # steps run in registration order (extensions may have added more)
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
527 527
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    # reuse the incoming computation to derive the outgoing set
    outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
540 540
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    # analyzeremotephases result unpacks as (public heads, draft roots)
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # on non-publishing servers only public changesets are outdated
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
589 589
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """discover the obsolescence markers relevant to this push"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (ctx.node() for ctx in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
600 600
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark changes (add/update/delete) to push"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # when pushing a subset, only consider bookmarks on pushed ancestors
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user explicitly asked to push (names expanded)
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # a side may have no node for the bookmark; keep None as-is
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        # convert binary nodes to hex for the pushkey-based exchange
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    # bookmark advanced locally: push the update
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        # explicitly-requested bookmarks left over were found on neither side
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
665 665
def _pushcheckoutgoing(pushop):
    """Check that the outgoing changesets are safe to push.

    Returns False when there is nothing to push. Aborts when a non-forced
    push includes obsolete/troubled changesets; head checking is delegated
    to discovery.checkheads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
696 696
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the @b2partsgenerator decorator)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
704 704
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def register(func):
        # each step may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        # idx=None appends; an explicit idx inserts at that position
        position = len(b2partsgenorder) if idx is None else idx
        b2partsgenorder.insert(position, stepname)
        return func
    return register
723 723
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        if not allowunrelated:
            # legacy check: send the full set of known remote heads
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # newer check: only send the remote heads this push affects
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    # not a new branch: gather heads we replace or obsolete
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
746 746
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version: highest version supported by both
    # the remote and our own repo; '01' when the remote advertises nothing.
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        # closes over cgpart.id to find our part's reply record
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
787 787
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one pushkey part per head to turn public; replies are checked by
    the returned ``handlereply`` callback.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # idiom/consistency fix: was "if not 'pushkey' in b2caps" — use the
    # "not in" form like _pushb2bookmarks does.
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        # called for a failed mandatory pushkey part: map the part id back
        # to the node whose phase update was rejected
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head moving from draft to public
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server's reply for each pushkey part and warn on
        # ignored or failed updates
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
828 828
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part when there are markers to push and the remote
    shares an obsmarker format version with us"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    versions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(versions) is None:
        # no common obsmarker format with the remote
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
840 840
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Emits one 'pushkey' part per outgoing bookmark and returns a reply
    handler reporting the outcome of each update on the remote.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (partid, bookmark name, action) triples, used by both callbacks below
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # translate a pushkey part failure into a user-facing abort
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # classify for user messages: no old value means a new bookmark,
        # no new value means a deletion, otherwise a plain move
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    # discovery can have set the value form invalid entry
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
892 892
893 893
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator, in registration order; each may
    # return a callable used later to process the server reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    # (nbparts <= 1 means only the mandatory 'replycaps' part was added)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the remote aborted inside a part; surface its message/hint
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # route the failure to the per-part callback registered above
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
942 942
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) changegroup push: builds a changegroup locally
    and sends it via the remote's 'unbundle' or 'addchangegroup' command.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
989 989
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Runs after the changegroup push: pulls the remote phase map, moves
    local phases to match, then pushes outdated phase roots back with
    the independent 'pushkey' command (bundle2 pushes them earlier).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing remote: everything common is public locally too
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1045 1045
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    trmanager = pushop.trmanager
    if trmanager:
        phases.advanceboundary(pushop.repo, trmanager.transaction(),
                               phase, nodes)
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when applicable.
    repo = pushop.repo
    if any(phase < repo[n].phase() for n in nodes):
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n')
                         % phases.phasenames[phase])
1062 1062
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.stepsdone.add('obsmarkers')
    repo = pushop.repo
    remote = pushop.remote
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    allok = True
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        if not remote.pushkey('obsolete', key, '', remotedata[key]):
            allok = False
    if not allok:
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1081 1081
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # pick the message pair matching the kind of change
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1103 1103
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # cached: bundle2 is usable unless configuration forces bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # cached bundle2 capabilities advertised by the remote
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1174 1174
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # the transaction is created lazily by transaction()
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is None:
            return
        self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is None:
            return
        self._tr.release()
1204 1204
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        # wlock before lock: standard Mercurial lock ordering
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each step below is a no-op if bundle2 already handled it
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1260 1260
1261 1261 # list of steps to perform discovery before pull
1262 1262 pulldiscoveryorder = []
1263 1263
1264 1264 # Mapping between step name and function
1265 1265 #
1266 1266 # This exists to help extensions wrap steps if necessary
1267 1267 pulldiscoverymapping = {}
1268 1268
1269 1269 def pulldiscovery(stepname):
1270 1270 """decorator for function performing discovery before pull
1271 1271
1272 1272 The function is added to the step -> function mapping and appended to the
1273 1273 list of steps. Beware that decorated function will be added in order (this
1274 1274 may matter).
1275 1275
1276 1276 You can only use this decorator for a new step, if you want to wrap a step
1277 1277 from an extension, change the pulldiscovery dictionary directly."""
1278 1278 def dec(func):
1279 1279 assert stepname not in pulldiscoverymapping
1280 1280 pulldiscoverymapping[stepname] = func
1281 1281 pulldiscoveryorder.append(stepname)
1282 1282 return func
1283 1283 return dec
1284 1284
1285 1285 def _pulldiscovery(pullop):
1286 1286 """Run all discovery steps"""
1287 1287 for stepname in pulldiscoveryorder:
1288 1288 step = pulldiscoverymapping[stepname]
1289 1289 step(pullop)
1290 1290
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    bundle2willdoit = (pullop.canusebundle2
                       and 'listkeys' in pullop.remotebundle2caps)
    if bundle2willdoit:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1304 1304
1305 1305
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* repo: knows about locally-hidden changesets
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads in filtered locally, lets drop it from the unknown
        # remote heads and put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally (possibly hidden): count it as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            # every remote head turned out to be known locally
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1343 1343
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = False

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    # keyword argument keys must be native str on Python 3
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1416 1416
1417 1417 def _pullbundle2extraprepare(pullop, kwargs):
1418 1418 """hook function so that extensions can extend the getbundle call"""
1419 1419 pass
1420 1420
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # prefer 'getbundle', fall back to older protocol commands
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = cg.apply(pullop.repo, tr, 'pull', pullop.remote.url())
1453 1453
def _pullphase(pullop):
    # Get remote phases data from remote and apply it locally
    if 'phases' in pullop.stepsdone:
        return
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1460 1460
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1495 1495
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1508 1508
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # keys are 'dumpN' chunks of base85-encoded marker data
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1536 1536
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1543 1543
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1570 1570
def bundle2requested(bundlecaps):
    """tell whether the client capabilities request a bundle2 ("HG2*") bundle

    ``bundlecaps`` may be None when no capabilities were transmitted."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1575 1575
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    # internal processing uses bytes keys; convert back with strkwargs below
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        # bundle10 understands nothing but a changegroup
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        # keyword argument keys must be native str on Python 3
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1616 1616
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            # negotiate the newest changegroup version both sides support
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                                bundlecaps=bundlecaps,
                                                version=version)

    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if cgversions:
            part.addparam('version', version)
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1644 1644
1645 1645 @getbundle2partsgenerator('listkeys')
1646 1646 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1647 1647 b2caps=None, **kwargs):
1648 1648 """add parts containing listkeys namespaces to the requested bundle"""
1649 1649 listkeys = kwargs.get('listkeys', ())
1650 1650 for namespace in listkeys:
1651 1651 part = bundler.newpart('listkeys')
1652 1652 part.addparam('namespace', namespace)
1653 1653 keys = repo.listkeys(namespace).items()
1654 1654 part.data = pushkey.encodekeys(keys)
1655 1655
1656 1656 @getbundle2partsgenerator('obsmarkers')
1657 1657 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1658 1658 b2caps=None, heads=None, **kwargs):
1659 1659 """add an obsolescence markers part to the requested bundle"""
1660 1660 if kwargs.get('obsmarkers', False):
1661 1661 if heads is None:
1662 1662 heads = repo.heads()
1663 1663 subset = [c.node() for c in repo.set('::%ln', heads)]
1664 1664 markers = repo.obsstore.relevantmarkers(subset)
1665 1665 markers = sorted(markers)
1666 1666 bundle2.buildobsmarkerspart(bundler, markers)
1667 1667
1668 1668 @getbundle2partsgenerator('hgtagsfnodes')
1669 1669 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1670 1670 b2caps=None, heads=None, common=None,
1671 1671 **kwargs):
1672 1672 """Transfer the .hgtags filenodes mapping.
1673 1673
1674 1674 Only values for heads in this bundle will be transferred.
1675 1675
1676 1676 The part data consists of pairs of 20 byte changeset node and .hgtags
1677 1677 filenodes raw values.
1678 1678 """
1679 1679 # Don't send unless:
1680 1680 # - changeset are being exchanged,
1681 1681 # - the client supports it.
1682 1682 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1683 1683 return
1684 1684
1685 1685 outgoing = _computeoutgoing(repo, heads, common)
1686 1686 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1687 1687
1688 1688 def _getbookmarks(repo, **kwargs):
1689 1689 """Returns bookmark to node mapping.
1690 1690
1691 1691 This function is primarily used to generate `bookmarks` bundle2 part.
1692 1692 It is a separate function in order to make it easy to wrap it
1693 1693 in extensions. Passing `kwargs` to the function makes it easy to
1694 1694 add new parameters in extensions.
1695 1695 """
1696 1696
1697 1697 return dict(bookmod.listbinbookmarks(repo))
1698 1698
1699 1699 def check_heads(repo, their_heads, context):
1700 1700 """check if the heads of a repo have been modified
1701 1701
1702 1702 Used by peer for unbundling.
1703 1703 """
1704 1704 heads = repo.heads()
1705 1705 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1706 1706 if not (their_heads == ['force'] or their_heads == heads or
1707 1707 their_heads == ['hashed', heads_hash]):
1708 1708 # someone else committed/pushed/unbundled while we
1709 1709 # were transferring data
1710 1710 raise error.PushRaced('repository changed while %s - '
1711 1711 'please try again' % context)
1712 1712
1713 1713 def unbundle(repo, cg, heads, source, url):
1714 1714 """Apply a bundle to a repo.
1715 1715
1716 1716 this function makes sure the repo is locked during the application and have
1717 1717 mechanism to check that no push race occurred between the creation of the
1718 1718 bundle and its application.
1719 1719
1720 1720 If the push was raced as PushRaced exception is raised."""
1721 1721 r = 0
1722 1722 # need a transaction when processing a bundle2 stream
1723 1723 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1724 1724 lockandtr = [None, None, None]
1725 1725 recordout = None
1726 1726 # quick fix for output mismatch with bundle2 in 3.4
1727 1727 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1728 1728 False)
1729 1729 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1730 1730 captureoutput = True
1731 1731 try:
1732 1732 # note: outside bundle1, 'heads' is expected to be empty and this
1733 1733 # 'check_heads' call wil be a no-op
1734 1734 check_heads(repo, heads, 'uploading changes')
1735 1735 # push can proceed
1736 1736 if not isinstance(cg, bundle2.unbundle20):
1737 1737 # legacy case: bundle1 (changegroup 01)
1738 1738 txnname = "\n".join([source, util.hidepassword(url)])
1739 1739 with repo.lock(), repo.transaction(txnname) as tr:
1740 1740 r = cg.apply(repo, tr, source, url)
1741 1741 else:
1742 1742 r = None
1743 1743 try:
1744 1744 def gettransaction():
1745 1745 if not lockandtr[2]:
1746 1746 lockandtr[0] = repo.wlock()
1747 1747 lockandtr[1] = repo.lock()
1748 1748 lockandtr[2] = repo.transaction(source)
1749 1749 lockandtr[2].hookargs['source'] = source
1750 1750 lockandtr[2].hookargs['url'] = url
1751 1751 lockandtr[2].hookargs['bundle2'] = '1'
1752 1752 return lockandtr[2]
1753 1753
1754 1754 # Do greedy locking by default until we're satisfied with lazy
1755 1755 # locking.
1756 1756 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1757 1757 gettransaction()
1758 1758
1759 1759 op = bundle2.bundleoperation(repo, gettransaction,
1760 1760 captureoutput=captureoutput)
1761 1761 try:
1762 1762 op = bundle2.processbundle(repo, cg, op=op)
1763 1763 finally:
1764 1764 r = op.reply
1765 1765 if captureoutput and r is not None:
1766 1766 repo.ui.pushbuffer(error=True, subproc=True)
1767 1767 def recordout(output):
1768 1768 r.newpart('output', data=output, mandatory=False)
1769 1769 if lockandtr[2] is not None:
1770 1770 lockandtr[2].close()
1771 1771 except BaseException as exc:
1772 1772 exc.duringunbundle2 = True
1773 1773 if captureoutput and r is not None:
1774 1774 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1775 1775 def recordout(output):
1776 1776 part = bundle2.bundlepart('output', data=output,
1777 1777 mandatory=False)
1778 1778 parts.append(part)
1779 1779 raise
1780 1780 finally:
1781 1781 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1782 1782 if recordout is not None:
1783 1783 recordout(repo.ui.popbuffer())
1784 1784 return r
1785 1785
1786 1786 def _maybeapplyclonebundle(pullop):
1787 1787 """Apply a clone bundle from a remote, if possible."""
1788 1788
1789 1789 repo = pullop.repo
1790 1790 remote = pullop.remote
1791 1791
1792 1792 if not repo.ui.configbool('ui', 'clonebundles', True):
1793 1793 return
1794 1794
1795 1795 # Only run if local repo is empty.
1796 1796 if len(repo):
1797 1797 return
1798 1798
1799 1799 if pullop.heads:
1800 1800 return
1801 1801
1802 1802 if not remote.capable('clonebundles'):
1803 1803 return
1804 1804
1805 1805 res = remote._call('clonebundles')
1806 1806
1807 1807 # If we call the wire protocol command, that's good enough to record the
1808 1808 # attempt.
1809 1809 pullop.clonebundleattempted = True
1810 1810
1811 1811 entries = parseclonebundlesmanifest(repo, res)
1812 1812 if not entries:
1813 1813 repo.ui.note(_('no clone bundles available on remote; '
1814 1814 'falling back to regular clone\n'))
1815 1815 return
1816 1816
1817 1817 entries = filterclonebundleentries(repo, entries)
1818 1818 if not entries:
1819 1819 # There is a thundering herd concern here. However, if a server
1820 1820 # operator doesn't advertise bundles appropriate for its clients,
1821 1821 # they deserve what's coming. Furthermore, from a client's
1822 1822 # perspective, no automatic fallback would mean not being able to
1823 1823 # clone!
1824 1824 repo.ui.warn(_('no compatible clone bundles available on server; '
1825 1825 'falling back to regular clone\n'))
1826 1826 repo.ui.warn(_('(you may want to report this to the server '
1827 1827 'operator)\n'))
1828 1828 return
1829 1829
1830 1830 entries = sortclonebundleentries(repo.ui, entries)
1831 1831
1832 1832 url = entries[0]['URL']
1833 1833 repo.ui.status(_('applying clone bundle from %s\n') % url)
1834 1834 if trypullbundlefromurl(repo.ui, repo, url):
1835 1835 repo.ui.status(_('finished applying clone bundle\n'))
1836 1836 # Bundle failed.
1837 1837 #
1838 1838 # We abort by default to avoid the thundering herd of
1839 1839 # clients flooding a server that was expecting expensive
1840 1840 # clone load to be offloaded.
1841 1841 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1842 1842 repo.ui.warn(_('falling back to normal clone\n'))
1843 1843 else:
1844 1844 raise error.Abort(_('error applying bundle'),
1845 1845 hint=_('if this error persists, consider contacting '
1846 1846 'the server operator or disable clone '
1847 1847 'bundles via '
1848 1848 '"--config ui.clonebundles=false"'))
1849 1849
1850 1850 def parseclonebundlesmanifest(repo, s):
1851 1851 """Parses the raw text of a clone bundles manifest.
1852 1852
1853 1853 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1854 1854 to the URL and other keys are the attributes for the entry.
1855 1855 """
1856 1856 m = []
1857 1857 for line in s.splitlines():
1858 1858 fields = line.split()
1859 1859 if not fields:
1860 1860 continue
1861 1861 attrs = {'URL': fields[0]}
1862 1862 for rawattr in fields[1:]:
1863 1863 key, value = rawattr.split('=', 1)
1864 1864 key = urlreq.unquote(key)
1865 1865 value = urlreq.unquote(value)
1866 1866 attrs[key] = value
1867 1867
1868 1868 # Parse BUNDLESPEC into components. This makes client-side
1869 1869 # preferences easier to specify since you can prefer a single
1870 1870 # component of the BUNDLESPEC.
1871 1871 if key == 'BUNDLESPEC':
1872 1872 try:
1873 1873 comp, version, params = parsebundlespec(repo, value,
1874 1874 externalnames=True)
1875 1875 attrs['COMPRESSION'] = comp
1876 1876 attrs['VERSION'] = version
1877 1877 except error.InvalidBundleSpecification:
1878 1878 pass
1879 1879 except error.UnsupportedBundleSpecification:
1880 1880 pass
1881 1881
1882 1882 m.append(attrs)
1883 1883
1884 1884 return m
1885 1885
1886 1886 def filterclonebundleentries(repo, entries):
1887 1887 """Remove incompatible clone bundle manifest entries.
1888 1888
1889 1889 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1890 1890 and returns a new list consisting of only the entries that this client
1891 1891 should be able to apply.
1892 1892
1893 1893 There is no guarantee we'll be able to apply all returned entries because
1894 1894 the metadata we use to filter on may be missing or wrong.
1895 1895 """
1896 1896 newentries = []
1897 1897 for entry in entries:
1898 1898 spec = entry.get('BUNDLESPEC')
1899 1899 if spec:
1900 1900 try:
1901 1901 parsebundlespec(repo, spec, strict=True)
1902 1902 except error.InvalidBundleSpecification as e:
1903 1903 repo.ui.debug(str(e) + '\n')
1904 1904 continue
1905 1905 except error.UnsupportedBundleSpecification as e:
1906 1906 repo.ui.debug('filtering %s because unsupported bundle '
1907 1907 'spec: %s\n' % (entry['URL'], str(e)))
1908 1908 continue
1909 1909
1910 1910 if 'REQUIRESNI' in entry and not sslutil.hassni:
1911 1911 repo.ui.debug('filtering %s because SNI not supported\n' %
1912 1912 entry['URL'])
1913 1913 continue
1914 1914
1915 1915 newentries.append(entry)
1916 1916
1917 1917 return newentries
1918 1918
1919 1919 class clonebundleentry(object):
1920 1920 """Represents an item in a clone bundles manifest.
1921 1921
1922 1922 This rich class is needed to support sorting since sorted() in Python 3
1923 1923 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1924 1924 won't work.
1925 1925 """
1926 1926
1927 1927 def __init__(self, value, prefers):
1928 1928 self.value = value
1929 1929 self.prefers = prefers
1930 1930
1931 1931 def _cmp(self, other):
1932 1932 for prefkey, prefvalue in self.prefers:
1933 1933 avalue = self.value.get(prefkey)
1934 1934 bvalue = other.value.get(prefkey)
1935 1935
1936 1936 # Special case for b missing attribute and a matches exactly.
1937 1937 if avalue is not None and bvalue is None and avalue == prefvalue:
1938 1938 return -1
1939 1939
1940 1940 # Special case for a missing attribute and b matches exactly.
1941 1941 if bvalue is not None and avalue is None and bvalue == prefvalue:
1942 1942 return 1
1943 1943
1944 1944 # We can't compare unless attribute present on both.
1945 1945 if avalue is None or bvalue is None:
1946 1946 continue
1947 1947
1948 1948 # Same values should fall back to next attribute.
1949 1949 if avalue == bvalue:
1950 1950 continue
1951 1951
1952 1952 # Exact matches come first.
1953 1953 if avalue == prefvalue:
1954 1954 return -1
1955 1955 if bvalue == prefvalue:
1956 1956 return 1
1957 1957
1958 1958 # Fall back to next attribute.
1959 1959 continue
1960 1960
1961 1961 # If we got here we couldn't sort by attributes and prefers. Fall
1962 1962 # back to index order.
1963 1963 return 0
1964 1964
1965 1965 def __lt__(self, other):
1966 1966 return self._cmp(other) < 0
1967 1967
1968 1968 def __gt__(self, other):
1969 1969 return self._cmp(other) > 0
1970 1970
1971 1971 def __eq__(self, other):
1972 1972 return self._cmp(other) == 0
1973 1973
1974 1974 def __le__(self, other):
1975 1975 return self._cmp(other) <= 0
1976 1976
1977 1977 def __ge__(self, other):
1978 1978 return self._cmp(other) >= 0
1979 1979
1980 1980 def __ne__(self, other):
1981 1981 return self._cmp(other) != 0
1982 1982
1983 1983 def sortclonebundleentries(ui, entries):
1984 1984 prefers = ui.configlist('ui', 'clonebundleprefers')
1985 1985 if not prefers:
1986 1986 return list(entries)
1987 1987
1988 1988 prefers = [p.split('=', 1) for p in prefers]
1989 1989
1990 1990 items = sorted(clonebundleentry(v, prefers) for v in entries)
1991 1991 return [i.value for i in items]
1992 1992
1993 1993 def trypullbundlefromurl(ui, repo, url):
1994 1994 """Attempt to apply a bundle from a URL."""
1995 1995 with repo.lock(), repo.transaction('bundleurl') as tr:
1996 1996 try:
1997 1997 fh = urlmod.open(ui, url)
1998 1998 cg = readbundle(ui, fh, 'stream')
1999 1999
2000 2000 if isinstance(cg, bundle2.unbundle20):
2001 2001 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2002 2002 elif isinstance(cg, streamclone.streamcloneapplier):
2003 2003 cg.apply(repo)
2004 2004 else:
2005 2005 cg.apply(repo, tr, 'clonebundles', url)
2006 2006 return True
2007 2007 except urlerr.httperror as e:
2008 2008 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2009 2009 except urlerr.urlerror as e:
2010 2010 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2011 2011
2012 2012 return False
@@ -1,465 +1,467
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15 15
16 16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 17 # s: "i hate that."
18 18
19 19 from __future__ import absolute_import
20 20
21 21 import os
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 error,
26 26 mdiff,
27 pycompat,
27 28 util,
28 29 vfs as vfsmod,
29 30 )
30 31
31 32 class CantReprocessAndShowBase(Exception):
32 33 pass
33 34
34 35 def intersect(ra, rb):
35 36 """Given two ranges return the range where they intersect or None.
36 37
37 38 >>> intersect((0, 10), (0, 6))
38 39 (0, 6)
39 40 >>> intersect((0, 10), (5, 15))
40 41 (5, 10)
41 42 >>> intersect((0, 10), (10, 15))
42 43 >>> intersect((0, 9), (10, 15))
43 44 >>> intersect((0, 9), (7, 15))
44 45 (7, 9)
45 46 """
46 47 assert ra[0] <= ra[1]
47 48 assert rb[0] <= rb[1]
48 49
49 50 sa = max(ra[0], rb[0])
50 51 sb = min(ra[1], rb[1])
51 52 if sa < sb:
52 53 return sa, sb
53 54 else:
54 55 return None
55 56
56 57 def compare_range(a, astart, aend, b, bstart, bend):
57 58 """Compare a[astart:aend] == b[bstart:bend], without slicing.
58 59 """
59 60 if (aend - astart) != (bend - bstart):
60 61 return False
61 62 for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
62 63 if a[ia] != b[ib]:
63 64 return False
64 65 else:
65 66 return True
66 67
67 68 class Merge3Text(object):
68 69 """3-way merge of texts.
69 70
70 71 Given strings BASE, OTHER, THIS, tries to produce a combined text
71 72 incorporating the changes from both BASE->OTHER and BASE->THIS."""
72 73 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
73 74 self.basetext = basetext
74 75 self.atext = atext
75 76 self.btext = btext
76 77 if base is None:
77 78 base = mdiff.splitnewlines(basetext)
78 79 if a is None:
79 80 a = mdiff.splitnewlines(atext)
80 81 if b is None:
81 82 b = mdiff.splitnewlines(btext)
82 83 self.base = base
83 84 self.a = a
84 85 self.b = b
85 86
86 87 def merge_lines(self,
87 88 name_a=None,
88 89 name_b=None,
89 90 name_base=None,
90 91 start_marker='<<<<<<<',
91 92 mid_marker='=======',
92 93 end_marker='>>>>>>>',
93 94 base_marker=None,
94 95 localorother=None,
95 96 minimize=False):
96 97 """Return merge in cvs-like form.
97 98 """
98 99 self.conflicts = False
99 100 newline = '\n'
100 101 if len(self.a) > 0:
101 102 if self.a[0].endswith('\r\n'):
102 103 newline = '\r\n'
103 104 elif self.a[0].endswith('\r'):
104 105 newline = '\r'
105 106 if name_a and start_marker:
106 107 start_marker = start_marker + ' ' + name_a
107 108 if name_b and end_marker:
108 109 end_marker = end_marker + ' ' + name_b
109 110 if name_base and base_marker:
110 111 base_marker = base_marker + ' ' + name_base
111 112 merge_regions = self.merge_regions()
112 113 if minimize:
113 114 merge_regions = self.minimize(merge_regions)
114 115 for t in merge_regions:
115 116 what = t[0]
116 117 if what == 'unchanged':
117 118 for i in range(t[1], t[2]):
118 119 yield self.base[i]
119 120 elif what == 'a' or what == 'same':
120 121 for i in range(t[1], t[2]):
121 122 yield self.a[i]
122 123 elif what == 'b':
123 124 for i in range(t[1], t[2]):
124 125 yield self.b[i]
125 126 elif what == 'conflict':
126 127 if localorother == 'local':
127 128 for i in range(t[3], t[4]):
128 129 yield self.a[i]
129 130 elif localorother == 'other':
130 131 for i in range(t[5], t[6]):
131 132 yield self.b[i]
132 133 else:
133 134 self.conflicts = True
134 135 if start_marker is not None:
135 136 yield start_marker + newline
136 137 for i in range(t[3], t[4]):
137 138 yield self.a[i]
138 139 if base_marker is not None:
139 140 yield base_marker + newline
140 141 for i in range(t[1], t[2]):
141 142 yield self.base[i]
142 143 if mid_marker is not None:
143 144 yield mid_marker + newline
144 145 for i in range(t[5], t[6]):
145 146 yield self.b[i]
146 147 if end_marker is not None:
147 148 yield end_marker + newline
148 149 else:
149 150 raise ValueError(what)
150 151
151 152 def merge_groups(self):
152 153 """Yield sequence of line groups. Each one is a tuple:
153 154
154 155 'unchanged', lines
155 156 Lines unchanged from base
156 157
157 158 'a', lines
158 159 Lines taken from a
159 160
160 161 'same', lines
161 162 Lines taken from a (and equal to b)
162 163
163 164 'b', lines
164 165 Lines taken from b
165 166
166 167 'conflict', base_lines, a_lines, b_lines
167 168 Lines from base were changed to either a or b and conflict.
168 169 """
169 170 for t in self.merge_regions():
170 171 what = t[0]
171 172 if what == 'unchanged':
172 173 yield what, self.base[t[1]:t[2]]
173 174 elif what == 'a' or what == 'same':
174 175 yield what, self.a[t[1]:t[2]]
175 176 elif what == 'b':
176 177 yield what, self.b[t[1]:t[2]]
177 178 elif what == 'conflict':
178 179 yield (what,
179 180 self.base[t[1]:t[2]],
180 181 self.a[t[3]:t[4]],
181 182 self.b[t[5]:t[6]])
182 183 else:
183 184 raise ValueError(what)
184 185
185 186 def merge_regions(self):
186 187 """Return sequences of matching and conflicting regions.
187 188
188 189 This returns tuples, where the first value says what kind we
189 190 have:
190 191
191 192 'unchanged', start, end
192 193 Take a region of base[start:end]
193 194
194 195 'same', astart, aend
195 196 b and a are different from base but give the same result
196 197
197 198 'a', start, end
198 199 Non-clashing insertion from a[start:end]
199 200
200 201 'conflict', zstart, zend, astart, aend, bstart, bend
201 202 Conflict between a and b, with z as common ancestor
202 203
203 204 Method is as follows:
204 205
205 206 The two sequences align only on regions which match the base
206 207 and both descendants. These are found by doing a two-way diff
207 208 of each one against the base, and then finding the
208 209 intersections between those regions. These "sync regions"
209 210 are by definition unchanged in both and easily dealt with.
210 211
211 212 The regions in between can be in any of three cases:
212 213 conflicted, or changed on only one side.
213 214 """
214 215
215 216 # section a[0:ia] has been disposed of, etc
216 217 iz = ia = ib = 0
217 218
218 219 for region in self.find_sync_regions():
219 220 zmatch, zend, amatch, aend, bmatch, bend = region
220 221 #print 'match base [%d:%d]' % (zmatch, zend)
221 222
222 223 matchlen = zend - zmatch
223 224 assert matchlen >= 0
224 225 assert matchlen == (aend - amatch)
225 226 assert matchlen == (bend - bmatch)
226 227
227 228 len_a = amatch - ia
228 229 len_b = bmatch - ib
229 230 len_base = zmatch - iz
230 231 assert len_a >= 0
231 232 assert len_b >= 0
232 233 assert len_base >= 0
233 234
234 235 #print 'unmatched a=%d, b=%d' % (len_a, len_b)
235 236
236 237 if len_a or len_b:
237 238 # try to avoid actually slicing the lists
238 239 equal_a = compare_range(self.a, ia, amatch,
239 240 self.base, iz, zmatch)
240 241 equal_b = compare_range(self.b, ib, bmatch,
241 242 self.base, iz, zmatch)
242 243 same = compare_range(self.a, ia, amatch,
243 244 self.b, ib, bmatch)
244 245
245 246 if same:
246 247 yield 'same', ia, amatch
247 248 elif equal_a and not equal_b:
248 249 yield 'b', ib, bmatch
249 250 elif equal_b and not equal_a:
250 251 yield 'a', ia, amatch
251 252 elif not equal_a and not equal_b:
252 253 yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
253 254 else:
254 255 raise AssertionError("can't handle a=b=base but unmatched")
255 256
256 257 ia = amatch
257 258 ib = bmatch
258 259 iz = zmatch
259 260
260 261 # if the same part of the base was deleted on both sides
261 262 # that's OK, we can just skip it.
262 263
263 264
264 265 if matchlen > 0:
265 266 assert ia == amatch
266 267 assert ib == bmatch
267 268 assert iz == zmatch
268 269
269 270 yield 'unchanged', zmatch, zend
270 271 iz = zend
271 272 ia = aend
272 273 ib = bend
273 274
274 275 def minimize(self, merge_regions):
275 276 """Trim conflict regions of lines where A and B sides match.
276 277
277 278 Lines where both A and B have made the same changes at the beginning
278 279 or the end of each merge region are eliminated from the conflict
279 280 region and are instead considered the same.
280 281 """
281 282 for region in merge_regions:
282 283 if region[0] != "conflict":
283 284 yield region
284 285 continue
285 286 issue, z1, z2, a1, a2, b1, b2 = region
286 287 alen = a2 - a1
287 288 blen = b2 - b1
288 289
289 290 # find matches at the front
290 291 ii = 0
291 292 while ii < alen and ii < blen and \
292 293 self.a[a1 + ii] == self.b[b1 + ii]:
293 294 ii += 1
294 295 startmatches = ii
295 296
296 297 # find matches at the end
297 298 ii = 0
298 299 while ii < alen and ii < blen and \
299 300 self.a[a2 - ii - 1] == self.b[b2 - ii - 1]:
300 301 ii += 1
301 302 endmatches = ii
302 303
303 304 if startmatches > 0:
304 305 yield 'same', a1, a1 + startmatches
305 306
306 307 yield ('conflict', z1, z2,
307 308 a1 + startmatches, a2 - endmatches,
308 309 b1 + startmatches, b2 - endmatches)
309 310
310 311 if endmatches > 0:
311 312 yield 'same', a2 - endmatches, a2
312 313
313 314 def find_sync_regions(self):
314 315 """Return a list of sync regions, where both descendants match the base.
315 316
316 317 Generates a list of (base1, base2, a1, a2, b1, b2). There is
317 318 always a zero-length sync region at the end of all the files.
318 319 """
319 320
320 321 ia = ib = 0
321 322 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
322 323 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
323 324 len_a = len(amatches)
324 325 len_b = len(bmatches)
325 326
326 327 sl = []
327 328
328 329 while ia < len_a and ib < len_b:
329 330 abase, amatch, alen = amatches[ia]
330 331 bbase, bmatch, blen = bmatches[ib]
331 332
332 333 # there is an unconflicted block at i; how long does it
333 334 # extend? until whichever one ends earlier.
334 335 i = intersect((abase, abase + alen), (bbase, bbase + blen))
335 336 if i:
336 337 intbase = i[0]
337 338 intend = i[1]
338 339 intlen = intend - intbase
339 340
340 341 # found a match of base[i[0], i[1]]; this may be less than
341 342 # the region that matches in either one
342 343 assert intlen <= alen
343 344 assert intlen <= blen
344 345 assert abase <= intbase
345 346 assert bbase <= intbase
346 347
347 348 asub = amatch + (intbase - abase)
348 349 bsub = bmatch + (intbase - bbase)
349 350 aend = asub + intlen
350 351 bend = bsub + intlen
351 352
352 353 assert self.base[intbase:intend] == self.a[asub:aend], \
353 354 (self.base[intbase:intend], self.a[asub:aend])
354 355
355 356 assert self.base[intbase:intend] == self.b[bsub:bend]
356 357
357 358 sl.append((intbase, intend,
358 359 asub, aend,
359 360 bsub, bend))
360 361
361 362 # advance whichever one ends first in the base text
362 363 if (abase + alen) < (bbase + blen):
363 364 ia += 1
364 365 else:
365 366 ib += 1
366 367
367 368 intbase = len(self.base)
368 369 abase = len(self.a)
369 370 bbase = len(self.b)
370 371 sl.append((intbase, intbase, abase, abase, bbase, bbase))
371 372
372 373 return sl
373 374
374 375 def find_unconflicted(self):
375 376 """Return a list of ranges in base that are not conflicted."""
376 377 am = mdiff.get_matching_blocks(self.basetext, self.atext)
377 378 bm = mdiff.get_matching_blocks(self.basetext, self.btext)
378 379
379 380 unc = []
380 381
381 382 while am and bm:
382 383 # there is an unconflicted block at i; how long does it
383 384 # extend? until whichever one ends earlier.
384 385 a1 = am[0][0]
385 386 a2 = a1 + am[0][2]
386 387 b1 = bm[0][0]
387 388 b2 = b1 + bm[0][2]
388 389 i = intersect((a1, a2), (b1, b2))
389 390 if i:
390 391 unc.append(i)
391 392
392 393 if a2 < b2:
393 394 del am[0]
394 395 else:
395 396 del bm[0]
396 397
397 398 return unc
398 399
399 400 def simplemerge(ui, local, base, other, **opts):
400 401 def readfile(filename):
401 402 f = open(filename, "rb")
402 403 text = f.read()
403 404 f.close()
404 405 if util.binary(text):
405 406 msg = _("%s looks like a binary file.") % filename
406 407 if not opts.get('quiet'):
407 408 ui.warn(_('warning: %s\n') % msg)
408 409 if not opts.get('text'):
409 410 raise error.Abort(msg)
410 411 return text
411 412
412 413 mode = opts.get('mode','merge')
413 414 if mode == 'union':
414 415 name_a = None
415 416 name_b = None
416 417 name_base = None
417 418 else:
418 419 name_a = local
419 420 name_b = other
420 421 name_base = None
421 422 labels = opts.get('label', [])
422 423 if len(labels) > 0:
423 424 name_a = labels[0]
424 425 if len(labels) > 1:
425 426 name_b = labels[1]
426 427 if len(labels) > 2:
427 428 name_base = labels[2]
428 429 if len(labels) > 3:
429 430 raise error.Abort(_("can only specify three labels."))
430 431
431 432 try:
432 433 localtext = readfile(local)
433 434 basetext = readfile(base)
434 435 othertext = readfile(other)
435 436 except error.Abort:
436 437 return 1
437 438
438 439 local = os.path.realpath(local)
439 440 if not opts.get('print'):
440 441 opener = vfsmod.vfs(os.path.dirname(local))
441 442 out = opener(os.path.basename(local), "w", atomictemp=True)
442 443 else:
443 444 out = ui.fout
444 445
445 446 m3 = Merge3Text(basetext, localtext, othertext)
446 447 extrakwargs = {
447 448 "localorother": opts.get("localorother", None),
448 449 'minimize': True,
449 450 }
450 451 if mode == 'union':
451 452 extrakwargs['start_marker'] = None
452 453 extrakwargs['mid_marker'] = None
453 454 extrakwargs['end_marker'] = None
454 455 elif name_base is not None:
455 456 extrakwargs['base_marker'] = '|||||||'
456 457 extrakwargs['name_base'] = name_base
457 458 extrakwargs['minimize'] = False
458 for line in m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs):
459 for line in m3.merge_lines(name_a=name_a, name_b=name_b,
460 **pycompat.strkwargs(extrakwargs)):
459 461 out.write(line)
460 462
461 463 if not opts.get('print'):
462 464 out.close()
463 465
464 466 if m3.conflicts and not mode == 'union':
465 467 return 1
@@ -1,709 +1,710
1 1 # templatekw.py - common changeset template keywords
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from .node import (
12 12 hex,
13 13 nullid,
14 14 short,
15 15 )
16 16
17 17 from . import (
18 18 encoding,
19 19 error,
20 20 hbisect,
21 21 obsutil,
22 22 patch,
23 23 pycompat,
24 24 registrar,
25 25 scmutil,
26 26 util,
27 27 )
28 28
class _hybrid(object):
    """Wrapper for list or dict to support legacy template

    This class allows us to handle both:
    - "{files}" (legacy command-line-specific list hack) and
    - "{files % '{file}\n'}" (hgweb-style with inlining and function support)
    and to access raw values:
    - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
    - "{get(extras, key)}"
    - "{files|json}"
    """

    def __init__(self, gen, values, makemap, joinfmt):
        # gen: optional pre-built string generator; when None, the
        # 'gen' propertycache below builds a default one lazily
        if gen is not None:
            self.gen = gen
        self._values = values
        self._makemap = makemap
        self.joinfmt = joinfmt
    @util.propertycache
    def gen(self):
        # only computed when no explicit generator was passed to __init__
        return self._defaultgen()
    def _defaultgen(self):
        """Generator to stringify this as {join(self, ' ')}"""
        for i, d in enumerate(self.itermaps()):
            if i > 0:
                yield ' '
            yield self.joinfmt(d)
    def itermaps(self):
        # yield one template mapping (dict) per underlying value
        makemap = self._makemap
        for x in self._values:
            yield makemap(x)
    def __contains__(self, x):
        return x in self._values
    def __len__(self):
        return len(self._values)
    def __iter__(self):
        return iter(self._values)
    def __getattr__(self, name):
        # proxy a fixed set of container methods to the wrapped values so
        # dict-like access keeps working; anything else is an error
        if name not in ('get', 'items', 'iteritems', 'iterkeys', 'itervalues',
                        'keys', 'values'):
            raise AttributeError(name)
        return getattr(self._values, name)
71 71
def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
    """Wrap data to support both dict-like and string-like operations"""
    makemap = lambda k: {key: k, value: data[k]}
    joinfmt = lambda d: fmt % (d[key], d[value])
    return _hybrid(gen, data, makemap, joinfmt)
76 76
def hybridlist(data, name, fmt='%s', gen=None):
    """Wrap data to support both list-like and string-like operations"""
    makemap = lambda x: {name: x}
    joinfmt = lambda d: fmt % d[name]
    return _hybrid(gen, data, makemap, joinfmt)
80 80
def unwraphybrid(thing):
    """Return an object which can be stringified possibly by using a legacy
    template"""
    # a _hybrid (anything exposing 'gen') is unwrapped to its generator
    if util.safehasattr(thing, 'gen'):
        return thing.gen
    return thing
87 87
def showdict(name, data, mapping, plural=None, key='key', value='value',
             fmt='%s=%s', separator=' '):
    """Render a dict both as a template list and as a plain string."""
    entries = [{key: k, value: v} for k, v in data.iteritems()]
    gen = _showlist(name, entries, mapping, plural, separator)
    return hybriddict(data, key=key, value=value, fmt=fmt, gen=gen)
93 93
def showlist(name, values, mapping, plural=None, element=None, separator=' '):
    """Render a list both as a template list and as a plain string."""
    gen = _showlist(name, values, mapping, plural, separator)
    # each element is exposed under 'element' (defaulting to 'name')
    return hybridlist(values, name=element or name, gen=gen)
99 99
def _showlist(name, values, mapping, plural=None, separator=' '):
    '''expand set of values.
    name is name of key in template map.
    values is list of strings or dicts.
    plural is plural of name, if not simply name + 's'.
    separator is used to join values as a string

    expansion works like this, given name 'foo'.

    if values is empty, expand 'no_foos'.

    if 'foo' not in template map, return values as a string,
    joined by 'separator'.

    expand 'start_foos'.

    for each value, expand 'foo'. if 'last_foo' in template
    map, expand it instead of 'foo' for last key.

    expand 'end_foos'.
    '''
    templ = mapping['templ']
    # templ() is called with **kwargs, whose keys must be str (not bytes)
    # on Python 3; convert once up front
    strmapping = pycompat.strkwargs(mapping)
    if not plural:
        plural = name + 's'
    if not values:
        noname = 'no_' + plural
        if noname in templ:
            yield templ(noname, **strmapping)
        return
    if name not in templ:
        # no template for this keyword: emit raw strings joined by
        # 'separator', or raw dicts merged with the mapping
        if isinstance(values[0], bytes):
            yield separator.join(values)
        else:
            for v in values:
                yield dict(v, **strmapping)
        return
    startname = 'start_' + plural
    if startname in templ:
        yield templ(startname, **strmapping)
    vmapping = mapping.copy()
    def one(v, tag=name):
        # fold the value into the per-item mapping: dicts update directly,
        # pair-iterables are unpacked, anything else is bound under 'name'
        try:
            vmapping.update(v)
        except (AttributeError, ValueError):
            try:
                for a, b in v:
                    vmapping[a] = b
            except ValueError:
                vmapping[name] = v
        return templ(tag, **pycompat.strkwargs(vmapping))
    lastname = 'last_' + name
    if lastname in templ:
        # NOTE: pops the caller's list so the last item gets its own template
        last = values.pop()
    else:
        last = None
    for v in values:
        yield one(v)
    if last is not None:
        yield one(last, tag=lastname)
    endname = 'end_' + plural
    if endname in templ:
        yield templ(endname, **strmapping)
162 163
def _formatrevnode(ctx):
    """Format changeset as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    # full hash in debug mode, short hash otherwise
    hexfunc = hex if ctx.repo().ui.debugflag else short
    return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
172 173
def getfiles(repo, ctx, revcache):
    """Return (modified, added, removed) for ctx, memoized in revcache."""
    try:
        return revcache['files']
    except KeyError:
        # first three elements of repo.status(): modified, added, removed
        files = repo.status(ctx.p1(), ctx)[:3]
        revcache['files'] = files
        return files
177 178
def getlatesttags(repo, ctx, cache, pattern=None):
    '''return date, distance and name for the latest tag of rev'''

    cachename = 'latesttags'
    if pattern is not None:
        # one cache entry per pattern: each pattern matches a different tag set
        cachename += '-' + pattern
        match = util.stringmatcher(pattern)[2]
    else:
        match = util.always

    if cachename not in cache:
        # Cache mapping from rev to a tuple with tag date, tag
        # distance and tag name
        cache[cachename] = {-1: (0, 0, ['null'])}
    latesttags = cache[cachename]

    rev = ctx.rev()
    todo = [rev]
    # explicit worklist walk over ancestors (see "Cache miss" below)
    while todo:
        rev = todo.pop()
        if rev in latesttags:
            continue
        ctx = repo[rev]
        # only global (non-local) tags that match the pattern count
        tags = [t for t in ctx.tags()
                if (repo.tagtype(t) and repo.tagtype(t) != 'local'
                    and match(t))]
        if tags:
            latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
            continue
        try:
            # The tuples are laid out so the right one can be found by
            # comparison.
            pdate, pdist, ptag = max(
                latesttags[p.rev()] for p in ctx.parents())
        except KeyError:
            # Cache miss - recurse
            todo.append(rev)
            todo.extend(p.rev() for p in ctx.parents())
            continue
        latesttags[rev] = pdate, pdist + 1, ptag
    return latesttags[rev]
219 220
def getrenamedfn(repo, endrev=None):
    # rcache maps filename -> {changerev: rename info}, shared by all
    # calls to the returned closure
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # populate the per-file cache from the filelog in one pass
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            return None

    return getrenamed
250 251
# default templates internally used for rendering of lists
# (keys are looked up by _showlist() when the outer template map has no
# entry for the keyword)
defaulttempl = {
    'parent': '{rev}:{node|formatnode} ',
    'manifest': '{rev}:{node|formatnode}',
    'file_copy': '{name} ({source})',
    'envvar': '{key}={value}',
    'extra': '{key}={value|stringescape}'
}
# filecopy is preserved for compatibility reasons
defaulttempl['filecopy'] = defaulttempl['file_copy']
261 262
# keywords are callables like:
# fn(repo, ctx, templ, cache, revcache, **args)
# with:
# repo - current repository instance
# ctx - the changectx being displayed
# templ - the templater instance
# cache - a cache dictionary for the whole templater run
# revcache - a cache dictionary for the current revision
# table of all template keywords; filled in by the @templatekeyword
# decorator created just below
keywords = {}

templatekeyword = registrar.templatekeyword(keywords)
273 274
@templatekeyword('author')
def showauthor(repo, ctx, templ, **args):
    """String. The unmodified author of the changeset."""
    # raw user field, no mailmap/alias processing
    return ctx.user()
278 279
@templatekeyword('bisect')
def showbisect(repo, ctx, templ, **args):
    """String. The changeset bisection status."""
    # e.g. 'good'/'bad'/... as computed by hbisect for this node
    return hbisect.label(repo, ctx.node())
283 284
@templatekeyword('branch')
def showbranch(**args):
    """String. The name of the branch on which the changeset was
    committed.
    """
    # args keys are native str (r'' prefix) since they come from **kwargs
    return args[r'ctx'].branch()
290 291
@templatekeyword('branches')
def showbranches(**args):
    """List of strings. The name of the branch on which the
    changeset was committed. Will be empty if the branch name was
    default. (DEPRECATED)
    """
    args = pycompat.byteskwargs(args)
    branch = args['ctx'].branch()
    # the implicit 'default' branch is rendered as an empty list
    values = [] if branch == 'default' else [branch]
    return showlist('branch', values, args, plural='branches')
302 303
@templatekeyword('bookmarks')
def showbookmarks(**args):
    """List of strings. Any bookmarks associated with the
    changeset. Also sets 'active', the name of the active bookmark.
    """
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    marks = ctx.bookmarks()
    active = ctx._repo._activebookmark
    def makemap(v):
        # 'current' is kept as a synonym of 'active' for compatibility
        return {'bookmark': v, 'active': active, 'current': active}
    gen = _showlist('bookmark', marks, args)
    return _hybrid(gen, marks, makemap, lambda d: d['bookmark'])
315 316
@templatekeyword('children')
def showchildren(**args):
    """List of strings. The children of the changeset."""
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    # render each child as 'rev:shortnode' via the context's __str__/__int__
    kids = ['%d:%s' % (child, child) for child in ctx.children()]
    return showlist('children', kids, args, element='child')
323 324
# Deprecated, but kept alive for help generation purposes.
@templatekeyword('currentbookmark')
def showcurrentbookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset (DEPRECATED)"""
    # plain alias for the 'activebookmark' keyword
    return showactivebookmark(**args)
330 331
@templatekeyword('activebookmark')
def showactivebookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset"""
    active = args[r'repo']._activebookmark
    # empty string unless the active bookmark points at this changeset
    if not active:
        return ''
    return active if active in args[r'ctx'].bookmarks() else ''
339 340
@templatekeyword('date')
def showdate(repo, ctx, templ, **args):
    """Date information. The date when the changeset was committed."""
    # (unixtime, tzoffset) tuple; formatting is left to date filters
    return ctx.date()
344 345
@templatekeyword('desc')
def showdescription(repo, ctx, templ, **args):
    """String. The text of the changeset description."""
    desc = ctx.description()
    if not isinstance(desc, encoding.localstr):
        return desc.strip()
    # localstr: strip in utf-8 space so the original utf-8 bytes survive
    return encoding.tolocal(encoding.fromlocal(desc).strip())
354 355
@templatekeyword('diffstat')
def showdiffstat(repo, ctx, templ, **args):
    """String. Statistics of changes with the following format:
    "modified files: +added/-removed lines"
    """
    lines = util.iterlines(ctx.diff(noprefix=False))
    stats = patch.diffstatdata(lines)
    # diffstatsum() -> (maxname, maxtotal, adds, removes, binary)
    adds, removes = patch.diffstatsum(stats)[2:4]
    return '%s: +%s/-%s' % (len(stats), adds, removes)
363 364
@templatekeyword('envvars')
def showenvvars(repo, **args):
    """A dictionary of environment variables. (EXPERIMENTAL)"""
    args = pycompat.byteskwargs(args)
    env = repo.ui.exportableenviron()
    # stable, key-sorted ordering for deterministic output
    env = util.sortdict(sorted(env.items()))
    return showdict('envvar', env, args, plural='envvars')
371 372
@templatekeyword('extras')
def showextras(**args):
    """List of dicts with key, value entries of the 'extras'
    field of this changeset."""
    args = pycompat.byteskwargs(args)
    extras = args['ctx'].extra()
    # stable key order for deterministic rendering
    extras = util.sortdict((k, extras[k]) for k in sorted(extras))
    def makemap(k):
        return {'key': k, 'value': extras[k]}
    def joinfmt(d):
        return '%s=%s' % (d['key'], util.escapestr(d['value']))
    entries = [makemap(k) for k in extras]
    gen = _showlist('extra', entries, args, plural='extras')
    return _hybrid(gen, extras, makemap, joinfmt)
384 385
@templatekeyword('file_adds')
def showfileadds(**args):
    """List of strings. Files added by this changeset."""
    args = pycompat.byteskwargs(args)
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    # getfiles() -> (modified, added, removed); added is index 1
    added = getfiles(repo, ctx, revcache)[1]
    return showlist('file_add', added, args, element='file')
392 393
@templatekeyword('file_copies')
def showfilecopies(**args):
    """List of strings. Files copied in this changeset with
    their sources.
    """
    args = pycompat.byteskwargs(args)
    cache, ctx = args['cache'], args['ctx']
    copies = args['revcache'].get('copies')
    if copies is None:
        # build the rename lookup lazily and keep it for the whole run
        if 'getrenamed' not in cache:
            cache['getrenamed'] = getrenamedfn(args['repo'])
        getrenamed = cache['getrenamed']
        rev = ctx.rev()
        copies = []
        for fname in ctx.files():
            rename = getrenamed(fname, rev)
            if rename:
                copies.append((fname, rename[0]))

    copies = util.sortdict(copies)
    return showdict('file_copy', copies, args, plural='file_copies',
                    key='name', value='source', fmt='%s (%s)')
414 415
# showfilecopiesswitch() displays file copies only if copy records are
# provided before calling the templater, usually with a --copies
# command line switch.
@templatekeyword('file_copies_switch')
def showfilecopiesswitch(**args):
    """List of strings. Like "file_copies" but displayed
    only if the --copied switch is set.
    """
    args = pycompat.byteskwargs(args)
    # empty unless the caller pre-populated revcache['copies']
    copies = util.sortdict(args['revcache'].get('copies') or [])
    return showdict('file_copy', copies, args, plural='file_copies',
                    key='name', value='source', fmt='%s (%s)')
428 429
@templatekeyword('file_dels')
def showfiledels(**args):
    """List of strings. Files removed by this changeset."""
    args = pycompat.byteskwargs(args)
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    # getfiles() -> (modified, added, removed); removed is index 2
    removed = getfiles(repo, ctx, revcache)[2]
    return showlist('file_del', removed, args, element='file')
436 437
@templatekeyword('file_mods')
def showfilemods(**args):
    """List of strings. Files modified by this changeset."""
    args = pycompat.byteskwargs(args)
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    # getfiles() -> (modified, added, removed); modified is index 0
    modified = getfiles(repo, ctx, revcache)[0]
    return showlist('file_mod', modified, args, element='file')
444 445
@templatekeyword('files')
def showfiles(**args):
    """List of strings. All files modified, added, or removed by this
    changeset.
    """
    args = pycompat.byteskwargs(args)
    # ctx.files() already covers modified + added + removed
    return showlist('file', args['ctx'].files(), args)
452 453
@templatekeyword('graphnode')
def showgraphnode(repo, ctx, **args):
    """String. The character representing the changeset node in
    an ASCII revision graph"""
    wpnodes = repo.dirstate.parents()
    if wpnodes[1] == nullid:
        # only one working-directory parent
        wpnodes = wpnodes[:1]
    # precedence: working parent > obsolete > branch close > normal
    if ctx.node() in wpnodes:
        return '@'
    if ctx.obsolete():
        return 'x'
    if ctx.closesbranch():
        return '_'
    return 'o'
468 469
@templatekeyword('index')
def showindex(**args):
    """Integer. The current iteration of the loop. (0 indexed)"""
    # just hosts documentation; should be overridden by template mapping
    raise error.Abort(_("can't use index in this context"))
474 475
@templatekeyword('latesttag')
def showlatesttag(**args):
    """List of strings. The global tags on the most recent globally
    tagged ancestor of this changeset. If no such tags exist, the list
    consists of the single string "null".
    """
    # delegate to the pattern-aware helper with no pattern
    return showlatesttags(None, **args)
482 483
def showlatesttags(pattern, **args):
    """helper method for the latesttag keyword and function"""
    args = pycompat.byteskwargs(args)
    repo, ctx = args['repo'], args['ctx']
    latesttags = getlatesttags(repo, ctx, args['cache'], pattern)

    # latesttag[0] is an implementation detail for sorting csets on different
    # branches in a stable manner- it is the date the tagged cset was created,
    # not the date the tag was created. Therefore it isn't made visible here.
    def makemap(v):
        return {
            'changes': _showchangessincetag,
            'distance': latesttags[1],
            'latesttag': v, # BC with {latesttag % '{latesttag}'}
            'tag': v
        }

    tags = latesttags[2]
    gen = _showlist('latesttag', tags, args, separator=':')
    return _hybrid(gen, tags, makemap, lambda d: d['latesttag'])
503 504
@templatekeyword('latesttagdistance')
def showlatesttagdistance(repo, ctx, templ, cache, **args):
    """Integer. Longest path to the latest tag."""
    # getlatesttags() -> (date, distance, names); distance is index 1
    return getlatesttags(repo, ctx, cache)[1]
508 509
@templatekeyword('changessincelatesttag')
def showchangessincelatesttag(repo, ctx, templ, cache, **args):
    """Integer. All ancestors not in the latest tag."""
    # getlatesttags() -> (date, distance, names); use the first tag name
    latesttag = getlatesttags(repo, ctx, cache)[2][0]

    return _showchangessincetag(repo, ctx, tag=latesttag, **args)
515 516
516 517 def _showchangessincetag(repo, ctx, **args):
517 518 offset = 0
518 519 revs = [ctx.rev()]
519 520 tag = args[r'tag']
520 521
521 522 # The only() revset doesn't currently support wdir()
522 523 if ctx.rev() is None:
523 524 offset = 1
524 525 revs = [p.rev() for p in ctx.parents()]
525 526
526 527 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
527 528
@templatekeyword('manifest')
def showmanifest(**args):
    # deliberately no docstring: keyword help comes from elsewhere
    repo, ctx, templ = args[r'repo'], args[r'ctx'], args[r'templ']
    mnode = ctx.manifestnode()
    if mnode is None:
        # just avoid crash, we might want to use the 'ff...' hash in future
        return
    # render with the 'manifest' template, exposing the manifest's own
    # rev/node instead of the changeset's
    args = args.copy()
    args.update({r'rev': repo.manifestlog._revlog.rev(mnode),
                 r'node': hex(mnode)})
    return templ('manifest', **args)
539 540
def shownames(namespace, **args):
    """helper method to generate a template keyword for a namespace"""
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    repo = ctx.repo()
    ns = repo.names[namespace]
    return showlist(ns.templatename, ns.names(repo, ctx.node()), args,
                    plural=namespace)
548 549
@templatekeyword('namespaces')
def shownamespaces(**args):
    """Dict of lists. Names attached to this changeset per
    namespace."""
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    repo = ctx.repo()
    # one rendered name-list per namespace, in the repo's namespace order
    namespaces = util.sortdict()
    for k, ns in repo.names.iteritems():
        namespaces[k] = showlist('name', ns.names(repo, ctx.node()), args)
    gen = _showlist('namespace', list(namespaces), args)
    return _hybrid(gen, namespaces,
                   lambda k: {'namespace': k, 'names': namespaces[k]},
                   lambda x: x['namespace'])
563 564
@templatekeyword('node')
def shownode(repo, ctx, templ, **args):
    """String. The changeset identification hash, as a 40 hexadecimal
    digit string.
    """
    return ctx.hex()
570 571
@templatekeyword('obsolete')
def showobsolete(repo, ctx, templ, **args):
    """String. Whether the changeset is obsolete.
    """
    # 'obsolete' when obsolete, empty string otherwise
    return 'obsolete' if ctx.obsolete() else ''
578 579
@templatekeyword("predecessors")
def showpredecessors(repo, ctx, **args):
    """Returns the list of the closest visible predecessors
    """
    predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
    # materialize into a list: on Python 3 map() returns a one-shot
    # iterator, but _hybrid needs len() and repeated iteration over the
    # wrapped values
    predecessors = [hex(n) for n in predecessors]

    return _hybrid(None, predecessors,
                   lambda x: {'ctx': repo[x], 'revcache': {}},
                   lambda d: _formatrevnode(d['ctx']))
589 590
@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    first parent, or -1 if the changeset has no parents."""
    return ctx.p1().rev()
595 596
@templatekeyword('p2rev')
def showp2rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    second parent, or -1 if the changeset has no second parent."""
    return ctx.p2().rev()
601 602
@templatekeyword('p1node')
def showp1node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's first parent,
    as a 40 digit hexadecimal string. If the changeset has no parents, all
    digits are 0."""
    return ctx.p1().hex()
608 609
@templatekeyword('p2node')
def showp2node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's second
    parent, as a 40 digit hexadecimal string. If the changeset has no second
    parent, all digits are 0."""
    return ctx.p2().hex()
615 616
@templatekeyword('parents')
def showparents(**args):
    """List of strings. The parents of the changeset in "rev:node"
    format. If the changeset has only one "natural" parent (the predecessor
    revision) nothing is shown."""
    args = pycompat.byteskwargs(args)
    repo = args['repo']
    pctxs = scmutil.meaningfulparents(repo, args['ctx'])
    # ifcontains() needs a list of str
    prevs = ["%d" % p.rev() for p in pctxs]
    parents = [[('rev', p.rev()),
                ('node', p.hex()),
                ('phase', p.phasestr())]
               for p in pctxs]
    gen = _showlist('parent', parents, args)
    def makemap(x):
        return {'ctx': repo[int(x)], 'revcache': {}}
    return _hybrid(gen, prevs, makemap,
                   lambda d: _formatrevnode(d['ctx']))
634 635
@templatekeyword('phase')
def showphase(repo, ctx, templ, **args):
    """String. The changeset phase name."""
    return ctx.phasestr()
639 640
@templatekeyword('phaseidx')
def showphaseidx(repo, ctx, templ, **args):
    """Integer. The changeset phase index."""
    return ctx.phase()
644 645
@templatekeyword('rev')
def showrev(repo, ctx, templ, **args):
    """Integer. The repository-local changeset revision number."""
    return scmutil.intrev(ctx)
649 650
def showrevslist(name, revs, **args):
    """helper to generate a list of revisions in which a mapped template will
    be evaluated"""
    args = pycompat.byteskwargs(args)
    repo = args['ctx'].repo()
    # ifcontains() needs a list of str
    strrevs = ["%d" % r for r in revs]
    gen = _showlist(name, strrevs, args)
    def makemap(x):
        return {name: x, 'ctx': repo[int(x)], 'revcache': {}}
    return _hybrid(gen, strrevs, makemap, lambda d: d[name])
661 662
@templatekeyword('subrepos')
def showsubrepos(**args):
    """List of strings. Updated subrepositories in the changeset."""
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    substate = ctx.substate
    if not substate:
        return showlist('subrepo', [], args)
    psubstate = ctx.parents()[0].substate or {}
    # modified or newly added in ctx
    changed = [s for s in substate
               if s not in psubstate or substate[s] != psubstate[s]]
    # removed in ctx
    changed.extend(s for s in psubstate if s not in substate)
    return showlist('subrepo', sorted(changed), args)
679 680
# don't remove "showtags" definition, even though namespaces will put
# a helper function for "tags" keyword into "keywords" map automatically,
# because online help text is built without namespaces initialization
@templatekeyword('tags')
def showtags(**args):
    """List of strings. Any tags associated with the changeset."""
    return shownames('tags', **args)
687 688
def loadkeyword(ui, extname, registrarobj):
    """Load template keyword from specified registrarobj
    """
    # copy every registered keyword into the module-level table
    keywords.update(registrarobj._table)
693 694
@templatekeyword('termwidth')
def termwidth(repo, ctx, templ, **args):
    """Integer. The width of the current terminal."""
    return repo.ui.termwidth()
698 699
@templatekeyword('troubles')
def showtroubles(**args):
    """List of strings. Evolution troubles affecting the changeset.

    (EXPERIMENTAL)
    """
    args = pycompat.byteskwargs(args)
    ctx = args['ctx']
    return showlist('trouble', ctx.troubles(), args)
707 708
# tell hggettext to extract docstrings from these functions:
# (the keyword docstrings double as user-visible, translatable help text)
i18nfunctions = keywords.values()
@@ -1,1380 +1,1380
1 1 # templater.py - template expansion for output
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import re
12 12 import types
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 color,
17 17 config,
18 18 encoding,
19 19 error,
20 20 minirst,
21 21 parser,
22 22 pycompat,
23 23 registrar,
24 24 revset as revsetmod,
25 25 revsetlang,
26 26 templatefilters,
27 27 templatekw,
28 28 util,
29 29 )
30 30
# template parsing

# operator/precedence table consumed by parser.parser()
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    "%": (16, None, None, ("%", 16), None),
    "|": (15, None, None, ("|", 15), None),
    "*": (5, None, None, ("*", 5), None),
    "/": (5, None, None, ("/", 5), None),
    "+": (4, None, None, ("+", 4), None),
    "-": (4, None, ("negate", 19), ("-", 4), None),
    "=": (3, None, None, ("keyvalue", 3), None),
    ",": (2, None, None, ("list", 2), None),
    ")": (0, None, None, None, None),
    "integer": (0, "integer", None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "template": (0, "template", None, None, None),
    "end": (0, None, None, None, None),
}
51 51
def tokenize(program, start, end, term=None):
    """Parse a template expression into a stream of tokens, which must end
    with term if specified.

    Yields (type, value, pos) tuples; types are the keys of `elements`
    plus 'integer', 'symbol', 'string', 'template' and 'end'.
    """
    pos = start
    program = pycompat.bytestr(program)
    while pos < end:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in "(=,)%|+-*/": # handle simple operators
            yield (c, None, pos)
        elif c in '"\'': # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield ('template', data, s)
            pos -= 1
        elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isdigit():
            s = pos
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield ('integer', program[s:pos], s)
            pos -= 1
        elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
              or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string    -> stringify()  -> compiletemplate()
            # ------------------------    ------------    ------------------
            # {f("\\\\ {g(\"\\\"\")}"}    \\ {g("\"")}    [r'\\', {g("\"")}]
            #             ~~~~~~~~
            #             escaped quoted string
            if c == 'r':
                pos += 1
                token = 'string'
            else:
                token = 'template'
            quote = program[pos:pos + 2]
            s = pos = pos + 2
            while pos < end: # find closing escaped quote
                if program.startswith('\\\\\\', pos, end):
                    pos += 4 # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == 'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isalnum() or c in '_':
            s = pos
            pos += 1
            while pos < end: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == "_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield ('symbol', sym, s)
            pos -= 1
        elif c == term:
            yield ('end', None, pos + 1)
            return
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    if term:
        raise error.ParseError(_("unterminated template expansion"), start)
    yield ('end', None, pos)
146 146
def _parsetemplate(tmpl, start, stop, quote=''):
    r"""
    >>> _parsetemplate('foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
    ([('string', 'foo\\')], 6)
    """
    parsed = []
    sepchars = '{' + quote
    pos = start
    p = parser.parser(elements)
    while pos < stop:
        # look for the next separator: '{' opens an expression; quote (if
        # any) closes the surrounding quoted template
        n = min((tmpl.find(c, pos, stop) for c in sepchars),
                key=lambda n: (n < 0, n))
        if n < 0:
            parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
            pos = stop
            break
        c = tmpl[n]
        # count the backslashes immediately preceding the separator
        bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
        if bs % 2 == 1:
            # escaped (e.g. '\{', '\\\{', but not '\\{')
            parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
            pos = n + 1
            continue
        if n > pos:
            parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
        if c == quote:
            return parsed, n + 1

        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
        parsed.append(parseres)

    if quote:
        raise error.ParseError(_("unterminated string"), start)
    return parsed, pos
189 189
def _unnesttemplatelist(tree):
    """Expand list of templates to node tuple

    >>> def f(tree):
    ...     print prettyformat(_unnesttemplatelist(tree))
    >>> f(('template', []))
    ('string', '')
    >>> f(('template', [('string', 'foo')]))
    ('string', 'foo')
    >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
    (template
      ('string', 'foo')
      ('symbol', 'rev'))
    >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
    (template
      ('symbol', 'rev'))
    >>> f(('template', [('template', [('string', 'foo')])]))
    ('string', 'foo')
    """
    if not isinstance(tree, tuple):
        return tree
    op = tree[0]
    if op != 'template':
        # recurse into operands of any other node type
        return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])

    assert len(tree) == 2
    xs = tuple(_unnesttemplatelist(x) for x in tree[1])
    if not xs:
        return ('string', '') # empty template ""
    elif len(xs) == 1 and xs[0][0] == 'string':
        return xs[0] # fast path for string with no template fragment "x"
    else:
        return (op,) + xs
223 223
def parse(tmpl):
    """Parse template string into tree"""
    stop = len(tmpl)
    parsed, pos = _parsetemplate(tmpl, 0, stop)
    assert pos == stop, 'unquoted template should be consumed'
    return _unnesttemplatelist(('template', parsed))
229 229
def _parseexpr(expr):
    """Parse a template expression into tree

    >>> _parseexpr('"foo"')
    ('string', 'foo')
    >>> _parseexpr('foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> _parseexpr('foo(')
    Traceback (most recent call last):
      ...
    ParseError: ('not a prefix: end', 4)
    >>> _parseexpr('"foo" "bar"')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 7)
    """
    p = parser.parser(elements)
    tree, pos = p.parse(tokenize(expr, 0, len(expr)))
    if pos != len(expr):
        # trailing garbage after a complete expression
        raise error.ParseError(_('invalid token'), pos)
    return _unnesttemplatelist(tree)
251 251
def prettyformat(tree):
    """Return an indented, multi-line rendering of a parsed tree."""
    return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
254 254
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    op = exp[0]
    if op not in curmethods:
        raise error.ParseError(_("unknown method '%s'") % op)
    return curmethods[op](exp, context)
261 261
262 262 # template evaluation
263 263
def getsymbol(exp):
    """Return the name carried by a 'symbol' node, or raise ParseError."""
    kind = exp[0]
    if kind != 'symbol':
        raise error.ParseError(_("expected a symbol, got '%s'") % kind)
    return exp[1]
268 268
def getlist(x):
    """Flatten a left-nested 'list' tree into a Python list of nodes."""
    out = []
    # 'list' nodes nest on the left: ('list', ('list', a, b), c) -> [a, b, c]
    while x and x[0] == 'list':
        out.append(x[2])
        x = x[1]
    if x:
        out.append(x)
    out.reverse()
    return out
275 275
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op in ('template', 'string'):
        return compileexp(exp, context, methods)
    if op != 'symbol':
        raise error.ParseError(_("expected template specifier"))
    # unlike runsymbol(), here 'symbol' is always taken as template name
    # even if it exists in mapping. this allows us to override mapping
    # by web templates, e.g. 'changelogtag' is redefined in map file.
    return context._load(exp[1])
287 287
def findsymbolicname(arg):
    """Find symbolic name for the given compiled expression; returns None
    if nothing found reliably"""
    func, data = arg
    if func is runsymbol:
        return data
    if func is runfilter:
        # peel the filter off and inspect its operand
        return findsymbolicname(data[0])
    return None
299 299
def evalfuncarg(context, mapping, arg):
    """Evaluate a compiled (func, data) pair to a concrete value."""
    func, data = arg
    thing = func(context, mapping, data)
    # func() may return string, generator of strings or arbitrary object
    # such as date tuple, but filter does not want generator.
    if isinstance(thing, types.GeneratorType):
        return stringify(thing)
    return thing
308 308
def evalboolean(context, mapping, arg):
    """Evaluate given argument as boolean, but also takes boolean literals"""
    func, data = arg
    if func is runsymbol:
        # resolve without a default so an unknown name can fall back to
        # being read as a 'true'/'false' literal
        thing = func(context, mapping, data, default=None)
        if thing is None:
            # not a template keyword, takes as a boolean literal
            thing = util.parsebool(data)
    else:
        thing = func(context, mapping, data)
    if isinstance(thing, bool):
        return thing
    # other objects are evaluated as strings, which means 0 is True, but
    # empty dict/list should be False as they are expected to be ''
    return bool(stringify(thing))
324 324
def evalinteger(context, mapping, arg, err):
    # evaluate arg and coerce to int; err is the (translated) message used
    # when the value can't be interpreted as an integer
    v = evalfuncarg(context, mapping, arg)
    try:
        return int(v)
    except (TypeError, ValueError):
        raise error.ParseError(err)
331 331
def evalstring(context, mapping, arg):
    # evaluate the compiled (func, data) pair and flatten the result to a
    # string
    func, data = arg
    return stringify(func(context, mapping, data))
335 335
def evalstringliteral(context, mapping, arg):
    """Evaluate given argument as string template, but returns symbol name
    if it is unknown"""
    func, data = arg
    if func is runsymbol:
        # default=data: an unresolvable symbol yields its own name
        thing = func(context, mapping, data, default=data)
    else:
        thing = func(context, mapping, data)
    return stringify(thing)
345 345
def runinteger(context, mapping, data):
    # literal integer token from the parse tree
    return int(data)
348 348
def runstring(context, mapping, data):
    # literal (raw) string token from the parse tree
    return data
351 351
def _recursivesymbolblocker(key):
    """Return a fake keyword that aborts when expanded; used to poison a
    mapping slot while the real value is being computed (cuts recursion)."""
    def showrecursion(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return showrecursion
356 356
def _runrecursivesymbol(context, mapping, key):
    # compiled form of the recursion poison; aborts instead of rendering
    raise error.Abort(_("recursive reference '%s' in template") % key)
359 359
def runsymbol(context, mapping, key, default=''):
    """Resolve a bare symbol: local mapping first, then context defaults,
    then a named template of the same name."""
    v = mapping.get(key)
    if v is None:
        v = context._defaults.get(key)
    if v is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            v = context.process(key, safemapping)
        except TemplateNotFound:
            v = default
    if callable(v):
        # keyword function; mapping keys are bytes, so convert them to str
        # for **-expansion to work on Python 3
        return v(**pycompat.strkwargs(mapping))
    return v
376 376
def buildtemplate(exp, context):
    """Compile a literal template node into (runtemplate, parts)."""
    parts = []
    for subexp in exp[1:]:
        parts.append(compileexp(subexp, context, methods))
    return (runtemplate, parts)
380 380
def runtemplate(context, mapping, template):
    """Yield each compiled fragment of the template in turn."""
    for entry in template:
        fn, payload = entry
        yield fn(context, mapping, payload)
384 384
def buildfilter(exp, context):
    """Compile a '|' operation; the right-hand side may be either a plain
    filter or a template function used in filter position."""
    n = getsymbol(exp[2])
    if n in context._filters:
        filt = context._filters[n]
        arg = compileexp(exp[1], context, methods)
        return (runfilter, (arg, filt))
    if n in funcs:
        # function in filter position, e.g. "x|json"
        f = funcs[n]
        args = _buildfuncargs(exp[1], context, methods, n, f._argspec)
        return (f, args)
    raise error.ParseError(_("unknown function '%s'") % n)
396 396
def runfilter(context, mapping, data):
    """Apply a compiled filter: data is an (arg, filt) pair where arg is a
    compiled (func, data) expression and filt a filter callable.

    Incompatible values raise Abort with a message naming the filter (and
    the offending keyword when it can be determined).
    """
    arg, filt = data
    thing = evalfuncarg(context, mapping, arg)
    try:
        return filt(thing)
    except (ValueError, AttributeError, TypeError):
        sym = findsymbolicname(arg)
        # use __name__ rather than the Python2-only func_name alias so the
        # error path also works on Python 3 (matches the py3 porting done
        # elsewhere in this file, e.g. pycompat.strkwargs in runsymbol)
        if sym:
            msg = (_("template filter '%s' is not compatible with keyword '%s'")
                   % (filt.__name__, sym))
        else:
            msg = _("incompatible use of template filter '%s'") % filt.__name__
        raise error.Abort(msg)
410 410
def buildmap(exp, context):
    """Compile a '%' operation: lhs supplies the items, rhs the template."""
    compiled = compileexp(exp[1], context, methods)
    loaded = gettemplate(exp[2], context)
    # state is the flat 4-tuple (func, data, tfunc, tdata)
    return (runmap, compiled + loaded)
415 415
def runmap(context, mapping, data):
    """Expand a '%' operation: render the template (tfunc, tdata) once per
    member of the collection produced by (func, data)."""
    func, data, tfunc, tdata = data
    d = func(context, mapping, data)
    if util.safehasattr(d, 'itermaps'):
        # hybrid types know how to present themselves as dict mappings
        diter = d.itermaps()
    else:
        try:
            diter = iter(d)
        except TypeError:
            if func is runsymbol:
                raise error.ParseError(_("keyword '%s' is not iterable") % data)
            else:
                raise error.ParseError(_("%r is not iterable") % d)

    for i, v in enumerate(diter):
        lm = mapping.copy()
        lm['index'] = i
        if isinstance(v, dict):
            lm.update(v)
            lm['originalnode'] = mapping.get('node')
            yield tfunc(context, lm, tdata)
        else:
            # v is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield v
442 442
def buildnegate(exp, context):
    """Compile unary minus into (runnegate, operand)."""
    compiled = compileexp(exp[1], context, exprmethods)
    return (runnegate, compiled)
446 446
def runnegate(context, mapping, data):
    """Evaluate unary minus on an integer operand."""
    value = evalinteger(context, mapping, data,
                        _('negation needs an integer argument'))
    return -value
451 451
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic operator into (runarithmetic, state)."""
    operands = [compileexp(x, context, exprmethods) for x in exp[1:3]]
    return (runarithmetic, (func, operands[0], operands[1]))
456 456
def runarithmetic(context, mapping, data):
    """Evaluate both operands as integers and apply func to them."""
    func, left, right = data
    err = _('arithmetic only defined on integers')
    a = evalinteger(context, mapping, left, err)
    b = evalinteger(context, mapping, right, err)
    try:
        return func(a, b)
    except ZeroDivisionError:
        raise error.Abort(_('division by zero is not defined'))
467 467
def buildfunc(exp, context):
    """Compile a function call; also accepts a filter in call syntax."""
    n = getsymbol(exp[1])
    if n in funcs:
        f = funcs[n]
        args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec)
        return (f, args)
    if n in context._filters:
        # filter invoked like a function: it takes exactly one argument
        args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None)
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % n)
        f = context._filters[n]
        return (runfilter, (args[0], f))
    raise error.ParseError(_("unknown function '%s'") % n)
481 481
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> fargs('a(l=1, k=2)', 'k l m').keys()
    ['l', 'k']
    >>> args = fargs('a(opts=1, k=2)', '**opts')
    >>> args.keys(), args['opts'].keys()
    (['opts'], ['opts', 'k'])
    """
    def compiledict(xs):
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())
    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        # '*args'-style catch-all of positional arguments
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # '**kwargs'-style catch-all of keyword arguments
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
518 518
def buildkeyvaluepair(exp, content):
    # '=' pairs are consumed by function-argument handling; anywhere else
    # they are a syntax error
    raise error.ParseError(_("can't use a key-value pair in this context"))
521 521
# dict of template built-in functions, populated by the @templatefunc
# decorators below
funcs = {}

templatefunc = registrar.templatefunc(funcs)
526 526
@templatefunc('date(date[, fmt])')
def date(context, mapping, args):
    """Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    dateval = evalfuncarg(context, mapping, args[0])
    fmt = evalstring(context, mapping, args[1]) if len(args) == 2 else None
    try:
        if fmt is None:
            return util.datestr(dateval)
        return util.datestr(dateval, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
548 548
@templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
def dict_(context, mapping, args):
    """Construct a dict from key-value pairs. A key may be omitted if
    a value expression can provide an unambiguous name."""
    data = util.sortdict()

    # positional arguments must carry an inferrable name (a bare symbol)
    for v in args['args']:
        k = findsymbolicname(v)
        if not k:
            raise error.ParseError(_('dict key cannot be inferred'))
        if k in data or k in args['kwargs']:
            raise error.ParseError(_("duplicated dict key '%s' inferred") % k)
        data[k] = evalfuncarg(context, mapping, v)

    data.update((k, evalfuncarg(context, mapping, v))
                for k, v in args['kwargs'].iteritems())
    return templatekw.hybriddict(data)
566 566
@templatefunc('diff([includepattern [, excludepattern]])')
def diff(context, mapping, args):
    """Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # each optional argument contributes at most one pattern
        if i >= len(args):
            return []
        s = evalstring(context, mapping, args[i]).strip()
        return [s] if s else []

    ctx = mapping['ctx']
    m = ctx.match([], getpatterns(0), getpatterns(1))
    return ''.join(ctx.diff(match=m))
586 586
@templatefunc('files(pattern)')
def files(context, mapping, args):
    """All files of the current changeset matching the pattern. See
    :hg:`help patterns`."""
    if len(args) != 1:
        # i18n: "files" is a keyword
        raise error.ParseError(_("files expects one argument"))

    pattern = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    matched = list(ctx.matches(ctx.match([pattern])))
    return templatekw.showlist("file", matched, mapping)
600 600
@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
def fill(context, mapping, args):
    """Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    width = 76
    initindent = hangindent = ''
    if len(args) >= 2:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
        try:
            # the indent arguments are both optional
            initindent = evalstring(context, mapping, args[2])
            hangindent = evalstring(context, mapping, args[3])
        except IndexError:
            pass

    return templatefilters.fill(text, width, initindent, hangindent)
624 624
@templatefunc('formatnode(node)')
def formatnode(context, mapping, args):
    """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
    if len(args) != 1:
        # i18n: "formatnode" is a keyword
        raise error.ParseError(_("formatnode expects one argument"))

    ui = mapping['ui']
    node = evalstring(context, mapping, args[0])
    # full hash in debug mode, short form otherwise
    return node if ui.debugflag else templatefilters.short(node)
637 637
@templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])',
              argspec='text width fillchar left')
def pad(context, mapping, args):
    """Pad text with a
    fill character."""
    if 'text' not in args or 'width' not in args:
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args['width'],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))

    text = evalstring(context, mapping, args['text'])

    fillchar = ' '
    if 'fillchar' in args:
        fillchar = evalstring(context, mapping, args['fillchar'])
        if len(color.stripeffects(fillchar)) != 1:
            # i18n: "pad" is a keyword
            raise error.ParseError(_("pad() expects a single fill character"))
    left = 'left' in args and evalboolean(context, mapping, args['left'])

    # pad by printable width, ignoring any color effect sequences
    fillwidth = width - encoding.colwidth(color.stripeffects(text))
    if fillwidth <= 0:
        return text
    padding = fillchar * fillwidth
    return padding + text if left else text + padding
670 670
@templatefunc('indent(text, indentchars[, firstline])')
def indent(context, mapping, args):
    """Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indentchars = evalstring(context, mapping, args[1])
    firstline = (evalstring(context, mapping, args[2])
                 if len(args) == 3 else indentchars)

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indentchars)
691 691
@templatefunc('get(dict, key)')
def get(context, mapping, args):
    """Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    dictarg = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(dictarg, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    return dictarg.get(evalfuncarg(context, mapping, args[1]))
708 708
@templatefunc('if(expr, then[, else])')
def if_(context, mapping, args):
    """Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    branch = None
    if evalboolean(context, mapping, args[0]):
        branch = args[1]
    elif len(args) == 3:
        branch = args[2]
    if branch is not None:
        yield branch[0](context, mapping, branch[1])
722 722
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
    """Conditionally execute based
    on whether the item "needle" is in "haystack"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    needle = evalstring(context, mapping, args[0])
    haystack = evalfuncarg(context, mapping, args[1])

    branch = None
    if needle in haystack:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    if branch is not None:
        yield branch[0](context, mapping, branch[1])
738 738
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
    """Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    lhs = evalstring(context, mapping, args[0])
    rhs = evalstring(context, mapping, args[1])
    branch = None
    if lhs == rhs:
        branch = args[2]
    elif len(args) == 4:
        branch = args[3]
    if branch is not None:
        yield branch[0](context, mapping, branch[1])
753 753
@templatefunc('join(list, sep)')
def join(context, mapping, args):
    """Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    joinset = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(joinset, 'itermaps'):
        # hybrid type: render each member with its own join formatter
        jf = joinset.joinfmt
        joinset = [jf(x) for x in joinset.itermaps()]

    joiner = evalstring(context, mapping, args[1]) if len(args) > 1 else " "

    for idx, item in enumerate(joinset):
        if idx:
            yield joiner
        yield item
777 777
@templatefunc('label(label, expr)')
def label(context, mapping, args):
    """Apply a label to generated content. Content with
    a label applied can result in additional post-processing, such as
    automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    ui = mapping['ui']
    thing = evalstring(context, mapping, args[1])
    # preserve unknown symbol as literal so effects like 'red', 'bold',
    # etc. don't need to be quoted
    effects = evalstringliteral(context, mapping, args[0])

    return ui.label(thing, effects)
794 794
@templatefunc('latesttag([pattern])')
def latesttag(context, mapping, args):
    """The global tags matching the given pattern on the
    most recent globally tagged ancestor of this changeset.
    If no such tags exist, the "{tag}" template resolves to
    the string "null"."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    pattern = None
    if len(args) == 1:
        pattern = evalstring(context, mapping, args[0])

    # mapping keys are bytes; convert them to str so **-expansion works on
    # Python 3 (same treatment as runsymbol's pycompat.strkwargs call)
    return templatekw.showlatesttags(pattern, **pycompat.strkwargs(mapping))
810 810
@templatefunc('localdate(date[, tz])')
def localdate(context, mapping, args):
    """Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    try:
        date = util.parsedate(date)
    except AttributeError:  # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            # named/explicit timezone string, e.g. "+0200"
            tzoffset, remainder = util.parsetimezone(tz)
            if remainder:
                tzoffset = None
        if tzoffset is None:
            # fall back to reading the tz argument as an offset in seconds
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no tz argument: use the local timezone offset
        tzoffset = util.makedate()[1]
    return (date[0], tzoffset)
841 841
@templatefunc('mod(a, b)')
def mod(context, mapping, args):
    """Calculate a mod b such that a / b + a mod b == a"""
    if len(args) != 2:
        # i18n: "mod" is a keyword
        raise error.ParseError(_("mod expects two arguments"))

    def modop(a, b):
        return a % b
    return runarithmetic(context, mapping, (modop, args[0], args[1]))
851 851
@templatefunc('relpath(path)')
def relpath(context, mapping, args):
    """Convert a repository-absolute path into a filesystem path relative to
    the current working directory."""
    if len(args) != 1:
        # i18n: "relpath" is a keyword
        raise error.ParseError(_("relpath expects one argument"))

    repo = mapping['ctx'].repo()
    target = evalstring(context, mapping, args[0])
    return repo.pathto(target)
863 863
@templatefunc('revset(query[, formatargs...])')
def revset(context, mapping, args):
    """Execute a revision set query. See
    :hg:`help revset`."""
    if not len(args) > 0:
        # i18n: "revset" is a keyword
        raise error.ParseError(_("revset expects one or more arguments"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    repo = ctx.repo()

    def query(expr):
        m = revsetmod.match(repo.ui, expr)
        return m(repo)

    if len(args) > 1:
        # interpolate the extra arguments into the query string
        formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
        revs = list(query(revsetlang.formatspec(raw, *formatargs)))
    else:
        # plain (argument-free) queries are cached per template run
        revsetcache = mapping['cache'].setdefault("revsetcache", {})
        if raw in revsetcache:
            revs = revsetcache[raw]
        else:
            revs = list(query(raw))
            revsetcache[raw] = revs

    # mapping keys are bytes; convert them to str so **-expansion works on
    # Python 3 (same treatment as runsymbol's pycompat.strkwargs call)
    return templatekw.showrevslist("revision", revs,
                                   **pycompat.strkwargs(mapping))
894 894
@templatefunc('rstdoc(text, style)')
def rstdoc(context, mapping, args):
    """Format reStructuredText."""
    if len(args) != 2:
        # i18n: "rstdoc" is a keyword
        raise error.ParseError(_("rstdoc expects two arguments"))

    text, style = [evalstring(context, mapping, a) for a in args]
    return minirst.format(text, style=style, keep=['verbose'])
906 906
@templatefunc('separate(sep, args)', argspec='sep *args')
def separate(context, mapping, args):
    """Add a separator between non-empty arguments."""
    if 'sep' not in args:
        # i18n: "separate" is a keyword
        raise error.ParseError(_("separate expects at least one argument"))

    sep = evalstring(context, mapping, args['sep'])
    needsep = False
    for arg in args['args']:
        argstr = evalstring(context, mapping, arg)
        if not argstr:
            # empty items contribute nothing, not even a separator
            continue
        if needsep:
            yield sep
        needsep = True
        yield argstr
925 925
@templatefunc('shortest(node, minlength=4)')
def shortest(context, mapping, args):
    """Obtain the shortest representation of
    a node."""
    if not (1 <= len(args) <= 2):
        # i18n: "shortest" is a keyword
        raise error.ParseError(_("shortest() expects one or two arguments"))

    node = evalstring(context, mapping, args[0])

    minlength = 4
    if len(args) > 1:
        minlength = evalinteger(context, mapping, args[1],
                                # i18n: "shortest" is a keyword
                                _("shortest() expects an integer minlength"))

    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = mapping['ctx']._repo.unfiltered().changelog
    def isvalid(test):
        # a prefix is valid when it matches exactly one node AND cannot
        # be confused with a revision number
        try:
            if cl._partialmatch(test) is None:
                # no node matches this prefix
                return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                return True
            except error.RevlogError:
                # ambiguous prefix: more than one node matches
                return False
            except error.WdirUnsupported:
                # single 'ff...' match
                return True

    # search outward from max(6, minlength): shrink while the prefix stays
    # unambiguous, grow when it collides; stop once we return to a length
    # we have already probed from the other direction
    shortest = node
    startlength = max(6, minlength)
    length = startlength
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest
981 981
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
    """Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if not (1 <= len(args) <= 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = evalstring(context, mapping, args[0])
    if len(args) < 2:
        return text.strip()
    return text.strip(evalstring(context, mapping, args[1]))
995 995
@templatefunc('sub(pattern, replacement, expression)')
def sub(context, mapping, args):
    """Perform text substitution
    using regular expressions."""
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pat, rpl, src = [evalstring(context, mapping, a) for a in args]
    try:
        patre = re.compile(pat)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
    # the replacement string can also be malformed (e.g. bad group refs),
    # which re raises only when the substitution is performed
    try:
        yield patre.sub(rpl, src)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
1017 1017
@templatefunc('startswith(pattern, text)')
def startswith(context, mapping, args):
    """Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    patn = evalstring(context, mapping, args[0])
    text = evalstring(context, mapping, args[1])
    return text if text.startswith(patn) else ''
1031 1031
@templatefunc('word(number, text[, separator])')
def word(context, mapping, args):
    """Return the nth word from a string."""
    if not (2 <= len(args) <= 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    num = evalinteger(context, mapping, args[0],
                      # i18n: "word" is a keyword
                      _("word expects an integer index"))
    text = evalstring(context, mapping, args[1])
    # a None splitter gives str.split() its default whitespace behavior
    splitter = evalstring(context, mapping, args[2]) if len(args) == 3 else None

    tokens = text.split(splitter)
    if -len(tokens) <= num < len(tokens):
        return tokens[num]
    return ''
1054 1054
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
# maps a parse-tree node type to a compiler returning a (runner, data) pair
exprmethods = {
    "integer": lambda e, c: (runinteger, e[1]),
    "string": lambda e, c: (runstring, e[1]),
    "symbol": lambda e, c: (runsymbol, e[1]),
    "template": buildtemplate,
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
    # ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    "keyvalue": buildkeyvaluepair,
    "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    "negate": buildnegate,
    "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    # template '/' is integer (floor) division
    "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
    }
1073 1073
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
methods = exprmethods.copy()
# at the top level a bare integer is looked up like a keyword symbol
methods["integer"] = exprmethods["symbol"] # '{1}' as variable
1077 1077
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == 'func' and tree[1][0] == 'symbol':
            # func(...) form: name is the symbol, args the argument list
            return tree[1][1], getlist(tree[2])
        if op == '|' and tree[2][0] == 'symbol':
            # expr|filter form: filter name with the expression as sole arg
            return tree[2][1], [tree[1]]
1091 1091
def expandaliases(tree, aliases):
    """Return a new tree with the given aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
1096 1096
# template engine

# convenience re-export: collapse any (possibly nested) template output
# into a single byte string
stringify = templatefilters.stringify
1100 1100
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators

    Hybrid wrappers are unwrapped, bytes are yielded as-is, None is
    skipped, non-iterable values are coerced with pycompat.bytestr, and
    any other iterable is flattened recursively.
    '''
    thing = templatekw.unwraphybrid(thing)
    if isinstance(thing, bytes):
        yield thing
    elif thing is None:
        pass
    elif not util.safehasattr(thing, '__iter__'):
        yield pycompat.bytestr(thing)
    else:
        # recurse instead of duplicating the dispatch above for each item;
        # the recursive call re-applies exactly the same per-item handling
        for i in thing:
            for j in _flatten(i):
                yield j
1122 1122
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when both ends carry the same quote character
    if len(s) >= 2 and s[0] == s[-1] and s[0] in "'\"":
        return s[1:-1]
    return s
1128 1128
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, aliases=()):
        # loader: callable mapping a template name to its unparsed text
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        if defaults is None:
            defaults = {}
        self._defaults = defaults
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {} # key: (func, data)

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                x = parse(self._loader(t))
                if self._aliasmap:
                    # expand template aliases before compiling
                    x = _aliasrules.expand(self._aliasmap, x)
                self._cache[t] = compileexp(x, self, methods)
            except: # re-raises
                # drop the poison entry so a later call can retry cleanly
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
1181 1181
1182 1182 engines = {'default': engine}
1183 1183
def stylelist():
    """Return a comma-separated, sorted list of available style names.

    Used in error hints. Returns an explanatory message instead when no
    template directory can be located at all.
    """
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    # styles ship as "map-cmdline.<style>"; ignore editor/patch leftovers
    # such as "*.orig" and "*.rej"
    styles = []
    for filename in os.listdir(paths[0]):
        split = filename.split(".")
        if split[-1] in ('orig', 'rej'):
            continue
        # len(split) > 1 guards against a bare "map-cmdline" entry, which
        # would otherwise raise IndexError
        if split[0] == "map-cmdline" and len(split) > 1:
            styles.append(split[1])
    return ", ".join(sorted(styles))
1197 1197
def _readmapfile(mapfile):
    """Load template elements from the given map file

    Returns a (cache, tmap) pair: cache maps template names to inline
    template strings, tmap maps template names to (engine type, file path)
    tuples.
    """
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile)

    cache = {}
    tmap = {}
    for key, val in conf[''].items():
        if not val:
            raise error.ParseError(_('missing value'), conf.source('', key))
        if val[0] in "'\"":
            # quoted value: an inline template string
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('', key))
            cache[key] = unquotestring(val)
        elif key == "__base__":
            # treat as a pointer to a base class for this style
            path = util.normpath(os.path.join(base, val))

            # fallback check in template paths
            if not os.path.exists(path):
                for p in templatepaths():
                    p2 = util.normpath(os.path.join(p, val))
                    if os.path.isfile(p2):
                        path = p2
                        break
                    p3 = util.normpath(os.path.join(p2, "map"))
                    if os.path.isfile(p3):
                        path = p3
                        break

            # merge the base style's entries without overriding our own
            bcache, btmap = _readmapfile(path)
            for k in bcache:
                if k not in cache:
                    cache[k] = bcache[k]
            for k in btmap:
                if k not in tmap:
                    tmap[k] = btmap[k]
        else:
            # unquoted value: "[engine:]relative/path" to a template file
            val = 'default', val
            if ':' in val[1]:
                val = val[1].split(':', 1)
            tmap[key] = val[0], os.path.join(base, val[1])
    return cache, tmap
1247 1247
class TemplateNotFound(error.Abort):
    """Raised when a template name cannot be resolved via the map file."""
    pass
1250 1250
class templater(object):

    def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.
        aliases is list of alias (name, replacement) pairs.
        '''
        if filters is None:
            filters = {}
        if defaults is None:
            defaults = {}
        if cache is None:
            cache = {}
        # copy so the caller's dict is not mutated as templates are loaded
        self.cache = cache.copy()
        # template name -> (engine type, path to template file)
        self.map = {}
        # built-in filters may be overridden by caller-supplied ones
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self._aliases = aliases
        # chunking bounds handed to util.increasingchunks() on output
        self.minchunk, self.maxchunk = minchunk, maxchunk
        # engine-type name -> engine instance, built lazily in __call__
        self.ecache = {}

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
                    minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, cache, [], minchunk, maxchunk)
        cache, tmap = _readmapfile(mapfile)
        t.cache.update(cache)
        t.map = tmap
        return t

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self.map[t][1])
            except KeyError as inst:
                raise TemplateNotFound(_('"%s" not in template map') %
                                       inst.args[0])
            except IOError as inst:
                # re-raise with the template file path in the message
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def render(self, mapping):
        """Render the default unnamed template and return result as string"""
        return stringify(self('', **mapping))

    def __call__(self, t, **mapping):
        mapping = pycompat.byteskwargs(mapping)
        # engine type defaults to 'default' unless the map file says otherwise
        ttype = t in self.map and self.map[t][0] or 'default'
        if ttype not in self.ecache:
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # coalesce tiny yields into progressively larger chunks
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
1322 1322
def templatepaths():
    '''return locations used for template files.'''
    candidates = [os.path.normpath(os.path.join(util.datapath, subdir))
                  for subdir in ['templates']]
    # keep only the directories that actually exist
    return [path for path in candidates if os.path.isdir(path)]
1329 1329
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
1337 1337
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map

    styles and paths each accept either a single string or a list.
    Returns a (style, mapfile) pair for the first match; raises
    RuntimeError when no mapfile is found at all.
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only plain name is allowed to honor template paths
        # (rejects empty names, '.', '..' and anything with a path
        # separator so a style name cannot escape the template dirs)
        if (not style
            or style in (os.curdir, os.pardir)
            or pycompat.ossep in style
            or pycompat.osaltsep and pycompat.osaltsep in style):
            continue
        locations = [os.path.join(style, 'map'), 'map-' + style]
        # bare 'map' is the last-resort fallback
        locations.append('map')

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1372 1372
def loadfunction(ui, extname, registrarobj):
    """Load template function from specified registrarobj
    """
    # merge functions registered by an extension into the global table
    for name, func in registrarobj._table.iteritems():
        funcs[name] = func

# tell hggettext to extract docstrings from these functions:
i18nfunctions = funcs.values()
General Comments 0
You need to be logged in to leave comments. Login now