spelling: fix some minor issues found by spell checker
Mads Kiilerich
r18644:3e92772d default
@@ -268,7 +268,7 @@ def perfrevlog(ui, repo, file_, **opts):
 def perfrevset(ui, repo, expr, clear=False):
 """benchmark the execution time of a revset
 
-Use the --clean option if need to evaluate the impact of build volative
+Use the --clean option if need to evaluate the impact of build volatile
 revisions set cache on the revset execution. Volatile cache hold filtered
 and obsolete related cache."""
 def d():
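As a side note on the hunk above, a rough sketch of what such a benchmark might do (the helper name time_revset and the call repo.invalidatevolatilesets() are my assumptions, not code from this patch):

# Illustrative sketch only -- not part of the patch. Assumes a Mercurial
# `repo` object; invalidatevolatilesets() is assumed to drop the volatile
# (filtered/obsolete related) caches mentioned in the docstring.
import time

def time_revset(repo, expr, clear=False, runs=3):
    """Return the best wall-clock time for evaluating `expr`."""
    best = None
    for _ in range(runs):
        if clear:
            # pay the volatile-cache rebuild cost on every run
            repo.invalidatevolatilesets()
        start = time.time()
        list(repo.revs(expr))  # force full evaluation of the revset
        elapsed = time.time() - start
        best = elapsed if best is None else min(best, elapsed)
    return best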
@@ -179,7 +179,7 @@ editcomment = _("""# Edit history betwee
 def commitfuncfor(repo, src):
 """Build a commit function for the replacement of <src>
 
-This function ensure we apply the same treatement to all changesets.
+This function ensure we apply the same treatment to all changesets.
 
 - Add a 'histedit_source' entry in extra.
 
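A minimal sketch of the idea documented in this hunk (the names make_commitfunc/commitfunc are invented here; the real commitfuncfor also handles phases and other details):

# Illustrative sketch only. `src` is assumed to be the binary node of the
# changeset being replaced.
from mercurial import node

def make_commitfunc(repo, src):
    def commitfunc(**kwargs):
        extra = dict(kwargs.pop('extra', None) or {})
        # record which changeset this commit replaces
        extra['histedit_source'] = node.hex(src)
        return repo.commit(extra=extra, **kwargs)
    return commitfunc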
@@ -626,7 +626,7 @@ def bootstrapcontinue(ui, repo, parentct
 newchildren)
 replacements.extend(repl)
 elif newchildren:
-# otherwize update "parentctx" before proceding to further operation
+# otherwise update "parentctx" before proceeding to further operation
 parentctx = repo[newchildren[-1]]
 return parentctx, replacements
 
@@ -446,7 +446,7 @@ def reposetup(ui, repo):
 the largefiles.
 So we do the following:
 For directories that only have largefiles as matches,
-we explicitly add the largefiles to the matchlist and remove
+we explicitly add the largefiles to the match list and remove
 the directory.
 In other cases, we leave the match list unmodified.
 '''
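A simplified sketch of the rewriting rule described in that docstring (the helper and its inputs are invented for illustration; the real code works on a match object, not plain lists):

# Illustrative sketch only: replace a directory entry by the largefiles it
# contains when those are its only matches; otherwise keep the entry as-is.
def expand_largefile_dirs(matchlist, largefiles_by_dir, normal_files_by_dir):
    result = []
    for entry in matchlist:
        larges = largefiles_by_dir.get(entry, [])
        normals = normal_files_by_dir.get(entry, [])
        if larges and not normals:
            result.extend(larges)   # directory matched only largefiles
        else:
            result.append(entry)    # leave the match list unmodified
    return result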
@@ -282,7 +282,7 @@ def newcommit(repo, phase, *args, **kwar
 if phase is not None:
 backup = repo.ui.backupconfig('phases', 'new-commit')
 # Marking the repository as committing an mq patch can be used
-# to optimize operations like _branchtags().
+# to optimize operations like branchtags().
 repo._committingpatch = True
 try:
 if phase is not None:
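The general shape of the backup/override/restore pattern this hunk sits in looks roughly like the following (a sketch reconstructed from the hunk, not the exact mq code; restoreconfig/setconfig usage is my assumption):

# Illustrative sketch of the backup/override/restore pattern around a commit.
def commit_with_phase(repo, phase, *args, **kwargs):
    backup = None
    if phase is not None:
        backup = repo.ui.backupconfig('phases', 'new-commit')
        repo.ui.setconfig('phases', 'new-commit', phase)
    try:
        return repo.commit(*args, **kwargs)
    finally:
        if backup is not None:
            repo.ui.restoreconfig(backup)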
@@ -1571,7 +1571,7 @@ class queue(object):
 r = list(dd)
 a = list(aa)
 
-# create 'match' that includes the files to be recommited.
+# create 'match' that includes the files to be recommitted.
 # apply matchfn via repo.status to ensure correct case handling.
 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
 allmatches = set(cm + ca + cr + cd)
@@ -692,8 +692,8 @@ def buildstate(repo, dest, rebaseset, co
 # If we have multiple roots, we may have "hole" in the rebase set.
 # Rebase roots that descend from those "hole" should not be detached as
 # other root are. We use the special `revignored` to inform rebase that
-# the revision should be ignored but that `defineparent` should search
-# a rebase destination that make sense regarding rebaset topology.
+# the revision should be ignored but that `defineparents` should search
+# a rebase destination that make sense regarding rebased topology.
 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
 for ignored in set(rebasedomain) - set(rebaseset):
 state[ignored] = revignored
@@ -95,7 +95,7 @@ class branchcache(dict):
 def _hashfiltered(self, repo):
 """build hash of revision filtered in the current cache
 
-Tracking tipnode and tiprev is not enough to ensure validaty of the
+Tracking tipnode and tiprev is not enough to ensure validity of the
 cache as they do not help to distinct cache that ignored various
 revision bellow tiprev.
 
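A rough sketch of the idea behind such a filtered-revision hash (the helper name and the repo.changelog.filteredrevs attribute are assumptions on my part, not taken from this patch): fold the filtered revisions at or below tiprev into one digest so the cache key changes whenever the hidden set changes.

# Illustrative sketch only.
import hashlib

def hash_filtered(repo, tiprev):
    # assumed: the set of revision numbers hidden by the current filter
    filtered = sorted(r for r in repo.changelog.filteredrevs if r <= tiprev)
    if not filtered:
        return None
    key = hashlib.sha1()
    for rev in filtered:
        key.update(('%d;' % rev).encode('ascii'))
    return key.digest()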
@@ -114,9 +114,9 @@ class branchcache(dict):
 return key
 
 def validfor(self, repo):
-"""Is the cache content valide regarding a repo
+"""Is the cache content valid regarding a repo
 
-- False when cached tipnode are unknown or if we detect a strip.
+- False when cached tipnode is unknown or if we detect a strip.
 - True when cache is up to date or a subset of current repo."""
 try:
 return ((self.tipnode == repo.changelog.node(self.tiprev))
@@ -426,12 +426,12 @@ class filectx(object):
 # repository is filtered this may lead to `filectx` trying to build
 # `changectx` for filtered revision. In such case we fallback to
 # creating `changectx` on the unfiltered version of the reposition.
-# This fallback should not be an issue because`changectx` from
-# `filectx` are not used in complexe operation that care about
+# This fallback should not be an issue because `changectx` from
+# `filectx` are not used in complex operations that care about
 # filtering.
 #
 # This fallback is a cheap and dirty fix that prevent several
-# crash. It does not ensure the behavior is correct. However the
+# crashes. It does not ensure the behavior is correct. However the
 # behavior was not correct before filtering either and "incorrect
 # behavior" is seen as better as "crash"
 #
@@ -49,7 +49,7 @@ class filteredpropertycache(propertycach
 
 
 def hasunfilteredcache(repo, name):
-"""check if an repo and a unfilteredproperty cached value for <name>"""
+"""check if a repo has an unfilteredpropertycache value for <name>"""
 return name in vars(repo.unfiltered())
 
 def unfilteredmethod(orig):
@@ -310,13 +310,13 @@ class localrepository(object):
 def unfiltered(self):
 """Return unfiltered version of the repository
 
-Intended to be ovewritten by filtered repo."""
+Intended to be overwritten by filtered repo."""
 return self
 
 def filtered(self, name):
 """Return a filtered version of a repository"""
 # build a new class with the mixin and the current class
-# (possibily subclass of the repo)
+# (possibly subclass of the repo)
 class proxycls(repoview.repoview, self.unfiltered().__class__):
 pass
 return proxycls(self, name)
@@ -962,7 +962,7 @@ class localrepository(object):
 delattr(self.unfiltered(), 'dirstate')
 
 def invalidate(self):
-unfiltered = self.unfiltered() # all filecaches are stored on unfiltered
+unfiltered = self.unfiltered() # all file caches are stored unfiltered
 for k in self._filecache:
 # dirstate is invalidated separately in invalidatedirstate()
 if k == 'dirstate':
@@ -1397,12 +1397,6 @@ class localrepository(object):
 '''Inform the repository that nodes have been destroyed.
 Intended for use by strip and rollback, so there's a common
 place for anything that has to be done after destroying history.
-
-If you know the branchheadcache was uptodate before nodes were removed
-and you also know the set of candidate new heads that may have resulted
-from the destruction, you can set newheadnodes. This will enable the
-code to update the branchheads cache, rather than having future code
-decide it's invalid and regenerating it from scratch.
 '''
 # When one tries to:
 # 1) destroy nodes thus calling this method (e.g. strip)
@@ -1417,7 +1411,7 @@ class localrepository(object):
 self._phasecache.write()
 
 # update the 'served' branch cache to help read only server process
-# Thanks to branchcach collaboration this is done from the nearest
+# Thanks to branchcache collaboration this is done from the nearest
 # filtered subset and it is expected to be fast.
 branchmap.updatecache(self.filtered('served'))
 
@@ -46,7 +46,7 @@ Examples:
 (A, (C, C))
 
 We use a single marker to distinct the "split" case from the "divergence"
-case. If two independants operation rewrite the same changeset A in to A' and
+case. If two independents operation rewrite the same changeset A in to A' and
 A'' when have an error case: divergent rewriting. We can detect it because
 two markers will be created independently:
 
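To make the split/divergence distinction above concrete, a toy sketch (the data structures are invented for illustration; obsolete.py stores markers differently):

# Illustrative sketch: one marker with several successors is a split,
# several independent markers rewriting the same precursor is divergence.
def classify(markers_for_precursor):
    """markers_for_precursor: list of successor tuples for one changeset."""
    if len(markers_for_precursor) > 1:
        return 'divergent'          # A rewritten independently into A' and A''
    if markers_for_precursor and len(markers_for_precursor[0]) > 1:
        return 'split'              # one marker: A -> (B, C)
    return 'simple'

print(classify([('Aprime',), ('Asecond',)]))   # divergent
print(classify([('B', 'C')]))                  # split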
@@ -129,8 +129,9 @@ from i18n import _
 #
 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
-# This flag mean that the successors are an interdiff that fix the bumped
-# situation, breaking the transitivity of "bumped" here.
+# This flag mean that the successors express the changes between the public and
+# bumped version and fix the situation, breaking the transitivity of
+# "bumped" here.
 bumpedfix = 1
 
 def _readmarkers(data):
@@ -510,7 +511,7 @@ def successorssets(repo, initialnode, ca
 # In such a situation, we arbitrary set the successors sets of
 # the node to nothing (node pruned) to break the cycle.
 #
-# If no break was encountered we proceeed to phase 2.
+# If no break was encountered we proceed to phase 2.
 #
 # Phase 2 computes successors sets of CURRENT (case 4); see details
 # in phase 2 itself.
@@ -551,13 +552,13 @@ def successorssets(repo, initialnode, ca
 # successors sets of all its "successors" node.
 #
 # Each different marker is a divergence in the obsolescence
-# history. It contributes successors sets dictinct from other
+# history. It contributes successors sets distinct from other
 # markers.
 #
 # Within a marker, a successor may have divergent successors
 # sets. In such a case, the marker will contribute multiple
 # divergent successors sets. If multiple successors have
-# divergents successors sets, a cartesian product is used.
+# divergent successors sets, a cartesian product is used.
 #
 # At the end we post-process successors sets to remove
 # duplicated entry and successors set that are strict subset of
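The "cartesian product" mentioned in that comment can be pictured as follows (a toy sketch, not the actual successorssets code):

# Illustrative sketch: combine the (possibly divergent) successors sets of
# each successor listed in a marker into the sets the marker contributes.
import itertools

def combine(per_successor_sets):
    """per_successor_sets: for each successor, a list of its successors sets."""
    result = []
    for combo in itertools.product(*per_successor_sets):
        merged = []
        for subset in combo:
            merged.extend(subset)
        result.append(tuple(merged))
    return result

# one successor resolves to [('X',)] and the other diverges into [('Y',), ('Z',)]:
# the marker contributes two successors sets, ('X', 'Y') and ('X', 'Z').
print(combine([[('X',)], [('Y',), ('Z',)]]))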
@@ -149,7 +149,7 @@ class repoview(object):
 repoview.method() --> repo.__class__.method(repoview)
 
 The inheritance has to be done dynamically because `repo` can be of any
-subclasses of `localrepo`. Eg: `bundlerepo` or `httprepo`.
+subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
 """
 
 def __init__(self, repo, filtername):
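A stripped-down illustration of the dynamic inheritance described in that docstring (class and function names are invented; repoview itself is more involved):

# Illustrative sketch: the proxy class is created at runtime so it inherits
# from whatever concrete class the wrapped repository actually has.
class ViewMixin(object):
    def __init__(self, wrapped, name):
        object.__setattr__(self, '_wrapped', wrapped)
        object.__setattr__(self, '_name', name)

    def __getattr__(self, attr):
        # only reached when normal lookup fails: delegate to the wrapped repo
        return getattr(self._wrapped, attr)

def make_view(repo, name):
    # mix the view with repo's real class (it may be any localrepo subclass),
    # so repo.__class__.method(view) keeps resolving correctly
    proxycls = type('proxycls', (ViewMixin, repo.__class__), {})
    return proxycls(repo, name)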
@@ -158,7 +158,7 @@ class repoview(object):
 object.__setattr__(self, '_clcachekey', None)
 object.__setattr__(self, '_clcache', None)
 
-# not a cacheproperty on purpose we shall implement a proper cache later
+# not a propertycache on purpose we shall implement a proper cache later
 @property
 def changelog(self):
 """return a filtered version of the changeset
@@ -210,7 +210,7 @@ class repoview(object):
 def __delattr__(self, attr):
 return delattr(self._unfilteredrepo, attr)
 
-# The `requirement` attribut is initialiazed during __init__. But
+# The `requirements` attribute is initialized during __init__. But
 # __getattr__ won't be called as it also exists on the class. We need
 # explicit forwarding to main repo here
 @property
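The comment in this hunk is worth illustrating: __getattr__ only runs when normal attribute lookup fails, so an attribute that also exists on the class has to be forwarded explicitly (a toy sketch with invented names):

# Illustrative sketch. `requirements` exists on the base class, so plain
# attribute access never falls through to __getattr__; an explicit property
# forwards it to the wrapped repository instead.
class BaseRepoLike(object):
    requirements = set()              # class-level attribute

class ViewLike(BaseRepoLike):
    def __init__(self, repo):
        object.__setattr__(self, '_unfilteredrepo', repo)

    def __getattr__(self, attr):      # never called for `requirements`
        return getattr(self._unfilteredrepo, attr)

    @property
    def requirements(self):           # explicit forwarding to the main repo
        return self._unfilteredrepo.requirements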
@@ -1,6 +1,6 @@
 Test file dedicated to testing the divergent troubles from obsolete changeset.
 
-This is the most complexe troubles from far so we isolate it in a dedicated
+This is the most complex troubles from far so we isolate it in a dedicated
 file.
 
 Enable obsolete
@@ -294,7 +294,7 @@ Even when subsequente rewriting happen
 e442cfc57690
 $ hg log -r 'divergent()'
 
-Check more complexe obsolescence graft (with divergence)
+Check more complex obsolescence graft (with divergence)
 
 $ mkcommit B_0; hg up 0
 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
@@ -564,7 +564,7 @@ We would expect heads are I, F if it was
 
 $ cd ..
 
-More complexe rebase with multiple roots
+More complex rebase with multiple roots
 each root have a different common ancestor with the destination and this is a detach
 
 (setup)