##// END OF EJS Templates
phases: add a repository requirement about internal phase...
Boris Feld -
r39334:7a9f15ed default
parent child Browse files
Show More
@@ -1,1398 +1,1401 b''
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
def loadconfigtable(ui, extname, configtable):
    """Merge an extension's declared config items into the ui's registry.

    For every section, any item the ui already knows about is reported
    through ``develwarn`` before the extension's definition replaces it.
    """
    for section, items in sorted(configtable.items()):
        registry = ui._knownconfig.setdefault(section, itemregister())
        # warn about collisions before the extension's entries win
        overlapping = set(registry) & set(items)
        for key in sorted(overlapping):
            warning = ("extension '%s' overwrite config item '%s.%s'"
                       % (extname, section, key))
            ui.develwarn(warning, config='warn-config')

        registry.update(items)
class configitem(object):
    """A single declared configuration option.

    :section: the official config section where to find this item,
    :name: the official name within the section,
    :default: default value for this item,
    :alias: optional list of tuples as alternatives,
    :generic: this is a generic definition, match name using regular expression.
    """

    def __init__(self, section, name, default=None, alias=(),
                 generic=False, priority=0):
        self.section = section
        self.name = name
        self.default = default
        self.alias = list(alias)
        self.generic = generic
        self.priority = priority
        # generic items match candidate names through this compiled pattern;
        # concrete items never need one
        self._re = re.compile(name) if generic else None
class itemregister(dict):
    """A specialized dictionary that can handle wild-card selection"""

    def __init__(self):
        super(itemregister, self).__init__()
        # subset of stored items flagged as generic (regex based)
        self._generics = set()

    def update(self, other):
        super(itemregister, self).update(other)
        self._generics.update(other._generics)

    def __setitem__(self, key, item):
        super(itemregister, self).__setitem__(key, item)
        if item.generic:
            self._generics.add(item)

    def get(self, key):
        exact = super(itemregister, self).get(key)
        if exact is not None and not exact.generic:
            return exact

        # No concrete entry: fall back to the generic items, scanned in
        # (priority, name) order.
        #
        # Generic patterns are applied with 'match' rather than 'search' so
        # they are anchored at the start of the key. That keeps behavior
        # unsurprising for users writing simple regexes for sub-attributes:
        # "color\..*" only matches keys that begin with "color.", whereas
        # 'search' could suddenly hit unrelated configuration merely
        # containing "color." somewhere. The tradeoff is that some patterns
        # must end in ".*", which seems less error prone than requiring a
        # leading "^" on most patterns.
        for candidate in sorted(self._generics,
                                key=lambda i: (i.priority, i.name)):
            if candidate._re.match(key):
                return candidate

        return None
# central table holding every core config item declared below
coreitems = {}
def _register(configtable, *args, **kwargs):
    """Build a configitem from the given arguments and file it in configtable.

    Raises ``error.ProgrammingError`` if the same section/name pair has
    already been registered.
    """
    item = configitem(*args, **kwargs)
    register = configtable.setdefault(item.section, itemregister())
    if item.name in register:
        raise error.ProgrammingError(
            "duplicated config item registration for '%s.%s'"
            % (item.section, item.name))
    register[item.name] = item
# sentinel default for items whose effective value is derived from other
# values at lookup time rather than declared here
dynamicdefault = object()
105
105
106 # Registering actual config items
106 # Registering actual config items
107
107
def getitemregister(configtable):
    """Return a registration function bound to *configtable*.

    The returned callable behaves like ``_register`` with the table already
    supplied. ``dynamicdefault`` is attached as an attribute so callers can
    reference it as a pseudo enum (``configitem.dynamicdefault``).
    """
    register = functools.partial(_register, configtable)
    register.dynamicdefault = dynamicdefault
    return register
113
113
114 coreconfigitem = getitemregister(coreitems)
114 coreconfigitem = getitemregister(coreitems)
115
115
116 coreconfigitem('alias', '.*',
116 coreconfigitem('alias', '.*',
117 default=dynamicdefault,
117 default=dynamicdefault,
118 generic=True,
118 generic=True,
119 )
119 )
120 coreconfigitem('annotate', 'nodates',
120 coreconfigitem('annotate', 'nodates',
121 default=False,
121 default=False,
122 )
122 )
123 coreconfigitem('annotate', 'showfunc',
123 coreconfigitem('annotate', 'showfunc',
124 default=False,
124 default=False,
125 )
125 )
126 coreconfigitem('annotate', 'unified',
126 coreconfigitem('annotate', 'unified',
127 default=None,
127 default=None,
128 )
128 )
129 coreconfigitem('annotate', 'git',
129 coreconfigitem('annotate', 'git',
130 default=False,
130 default=False,
131 )
131 )
132 coreconfigitem('annotate', 'ignorews',
132 coreconfigitem('annotate', 'ignorews',
133 default=False,
133 default=False,
134 )
134 )
135 coreconfigitem('annotate', 'ignorewsamount',
135 coreconfigitem('annotate', 'ignorewsamount',
136 default=False,
136 default=False,
137 )
137 )
138 coreconfigitem('annotate', 'ignoreblanklines',
138 coreconfigitem('annotate', 'ignoreblanklines',
139 default=False,
139 default=False,
140 )
140 )
141 coreconfigitem('annotate', 'ignorewseol',
141 coreconfigitem('annotate', 'ignorewseol',
142 default=False,
142 default=False,
143 )
143 )
144 coreconfigitem('annotate', 'nobinary',
144 coreconfigitem('annotate', 'nobinary',
145 default=False,
145 default=False,
146 )
146 )
147 coreconfigitem('annotate', 'noprefix',
147 coreconfigitem('annotate', 'noprefix',
148 default=False,
148 default=False,
149 )
149 )
150 coreconfigitem('annotate', 'word-diff',
150 coreconfigitem('annotate', 'word-diff',
151 default=False,
151 default=False,
152 )
152 )
153 coreconfigitem('auth', 'cookiefile',
153 coreconfigitem('auth', 'cookiefile',
154 default=None,
154 default=None,
155 )
155 )
156 # bookmarks.pushing: internal hack for discovery
156 # bookmarks.pushing: internal hack for discovery
157 coreconfigitem('bookmarks', 'pushing',
157 coreconfigitem('bookmarks', 'pushing',
158 default=list,
158 default=list,
159 )
159 )
160 # bundle.mainreporoot: internal hack for bundlerepo
160 # bundle.mainreporoot: internal hack for bundlerepo
161 coreconfigitem('bundle', 'mainreporoot',
161 coreconfigitem('bundle', 'mainreporoot',
162 default='',
162 default='',
163 )
163 )
164 # bundle.reorder: experimental config
164 # bundle.reorder: experimental config
165 coreconfigitem('bundle', 'reorder',
165 coreconfigitem('bundle', 'reorder',
166 default='auto',
166 default='auto',
167 )
167 )
168 coreconfigitem('censor', 'policy',
168 coreconfigitem('censor', 'policy',
169 default='abort',
169 default='abort',
170 )
170 )
171 coreconfigitem('chgserver', 'idletimeout',
171 coreconfigitem('chgserver', 'idletimeout',
172 default=3600,
172 default=3600,
173 )
173 )
174 coreconfigitem('chgserver', 'skiphash',
174 coreconfigitem('chgserver', 'skiphash',
175 default=False,
175 default=False,
176 )
176 )
177 coreconfigitem('cmdserver', 'log',
177 coreconfigitem('cmdserver', 'log',
178 default=None,
178 default=None,
179 )
179 )
180 coreconfigitem('color', '.*',
180 coreconfigitem('color', '.*',
181 default=None,
181 default=None,
182 generic=True,
182 generic=True,
183 )
183 )
184 coreconfigitem('color', 'mode',
184 coreconfigitem('color', 'mode',
185 default='auto',
185 default='auto',
186 )
186 )
187 coreconfigitem('color', 'pagermode',
187 coreconfigitem('color', 'pagermode',
188 default=dynamicdefault,
188 default=dynamicdefault,
189 )
189 )
190 coreconfigitem('commands', 'grep.all-files',
190 coreconfigitem('commands', 'grep.all-files',
191 default=False,
191 default=False,
192 )
192 )
193 coreconfigitem('commands', 'resolve.confirm',
193 coreconfigitem('commands', 'resolve.confirm',
194 default=False,
194 default=False,
195 )
195 )
196 coreconfigitem('commands', 'resolve.mark-check',
196 coreconfigitem('commands', 'resolve.mark-check',
197 default='none',
197 default='none',
198 )
198 )
199 coreconfigitem('commands', 'show.aliasprefix',
199 coreconfigitem('commands', 'show.aliasprefix',
200 default=list,
200 default=list,
201 )
201 )
202 coreconfigitem('commands', 'status.relative',
202 coreconfigitem('commands', 'status.relative',
203 default=False,
203 default=False,
204 )
204 )
205 coreconfigitem('commands', 'status.skipstates',
205 coreconfigitem('commands', 'status.skipstates',
206 default=[],
206 default=[],
207 )
207 )
208 coreconfigitem('commands', 'status.terse',
208 coreconfigitem('commands', 'status.terse',
209 default='',
209 default='',
210 )
210 )
211 coreconfigitem('commands', 'status.verbose',
211 coreconfigitem('commands', 'status.verbose',
212 default=False,
212 default=False,
213 )
213 )
214 coreconfigitem('commands', 'update.check',
214 coreconfigitem('commands', 'update.check',
215 default=None,
215 default=None,
216 )
216 )
217 coreconfigitem('commands', 'update.requiredest',
217 coreconfigitem('commands', 'update.requiredest',
218 default=False,
218 default=False,
219 )
219 )
220 coreconfigitem('committemplate', '.*',
220 coreconfigitem('committemplate', '.*',
221 default=None,
221 default=None,
222 generic=True,
222 generic=True,
223 )
223 )
224 coreconfigitem('convert', 'bzr.saverev',
224 coreconfigitem('convert', 'bzr.saverev',
225 default=True,
225 default=True,
226 )
226 )
227 coreconfigitem('convert', 'cvsps.cache',
227 coreconfigitem('convert', 'cvsps.cache',
228 default=True,
228 default=True,
229 )
229 )
230 coreconfigitem('convert', 'cvsps.fuzz',
230 coreconfigitem('convert', 'cvsps.fuzz',
231 default=60,
231 default=60,
232 )
232 )
233 coreconfigitem('convert', 'cvsps.logencoding',
233 coreconfigitem('convert', 'cvsps.logencoding',
234 default=None,
234 default=None,
235 )
235 )
236 coreconfigitem('convert', 'cvsps.mergefrom',
236 coreconfigitem('convert', 'cvsps.mergefrom',
237 default=None,
237 default=None,
238 )
238 )
239 coreconfigitem('convert', 'cvsps.mergeto',
239 coreconfigitem('convert', 'cvsps.mergeto',
240 default=None,
240 default=None,
241 )
241 )
242 coreconfigitem('convert', 'git.committeractions',
242 coreconfigitem('convert', 'git.committeractions',
243 default=lambda: ['messagedifferent'],
243 default=lambda: ['messagedifferent'],
244 )
244 )
245 coreconfigitem('convert', 'git.extrakeys',
245 coreconfigitem('convert', 'git.extrakeys',
246 default=list,
246 default=list,
247 )
247 )
248 coreconfigitem('convert', 'git.findcopiesharder',
248 coreconfigitem('convert', 'git.findcopiesharder',
249 default=False,
249 default=False,
250 )
250 )
251 coreconfigitem('convert', 'git.remoteprefix',
251 coreconfigitem('convert', 'git.remoteprefix',
252 default='remote',
252 default='remote',
253 )
253 )
254 coreconfigitem('convert', 'git.renamelimit',
254 coreconfigitem('convert', 'git.renamelimit',
255 default=400,
255 default=400,
256 )
256 )
257 coreconfigitem('convert', 'git.saverev',
257 coreconfigitem('convert', 'git.saverev',
258 default=True,
258 default=True,
259 )
259 )
260 coreconfigitem('convert', 'git.similarity',
260 coreconfigitem('convert', 'git.similarity',
261 default=50,
261 default=50,
262 )
262 )
263 coreconfigitem('convert', 'git.skipsubmodules',
263 coreconfigitem('convert', 'git.skipsubmodules',
264 default=False,
264 default=False,
265 )
265 )
266 coreconfigitem('convert', 'hg.clonebranches',
266 coreconfigitem('convert', 'hg.clonebranches',
267 default=False,
267 default=False,
268 )
268 )
269 coreconfigitem('convert', 'hg.ignoreerrors',
269 coreconfigitem('convert', 'hg.ignoreerrors',
270 default=False,
270 default=False,
271 )
271 )
272 coreconfigitem('convert', 'hg.revs',
272 coreconfigitem('convert', 'hg.revs',
273 default=None,
273 default=None,
274 )
274 )
275 coreconfigitem('convert', 'hg.saverev',
275 coreconfigitem('convert', 'hg.saverev',
276 default=False,
276 default=False,
277 )
277 )
278 coreconfigitem('convert', 'hg.sourcename',
278 coreconfigitem('convert', 'hg.sourcename',
279 default=None,
279 default=None,
280 )
280 )
281 coreconfigitem('convert', 'hg.startrev',
281 coreconfigitem('convert', 'hg.startrev',
282 default=None,
282 default=None,
283 )
283 )
284 coreconfigitem('convert', 'hg.tagsbranch',
284 coreconfigitem('convert', 'hg.tagsbranch',
285 default='default',
285 default='default',
286 )
286 )
287 coreconfigitem('convert', 'hg.usebranchnames',
287 coreconfigitem('convert', 'hg.usebranchnames',
288 default=True,
288 default=True,
289 )
289 )
290 coreconfigitem('convert', 'ignoreancestorcheck',
290 coreconfigitem('convert', 'ignoreancestorcheck',
291 default=False,
291 default=False,
292 )
292 )
293 coreconfigitem('convert', 'localtimezone',
293 coreconfigitem('convert', 'localtimezone',
294 default=False,
294 default=False,
295 )
295 )
296 coreconfigitem('convert', 'p4.encoding',
296 coreconfigitem('convert', 'p4.encoding',
297 default=dynamicdefault,
297 default=dynamicdefault,
298 )
298 )
299 coreconfigitem('convert', 'p4.startrev',
299 coreconfigitem('convert', 'p4.startrev',
300 default=0,
300 default=0,
301 )
301 )
302 coreconfigitem('convert', 'skiptags',
302 coreconfigitem('convert', 'skiptags',
303 default=False,
303 default=False,
304 )
304 )
305 coreconfigitem('convert', 'svn.debugsvnlog',
305 coreconfigitem('convert', 'svn.debugsvnlog',
306 default=True,
306 default=True,
307 )
307 )
308 coreconfigitem('convert', 'svn.trunk',
308 coreconfigitem('convert', 'svn.trunk',
309 default=None,
309 default=None,
310 )
310 )
311 coreconfigitem('convert', 'svn.tags',
311 coreconfigitem('convert', 'svn.tags',
312 default=None,
312 default=None,
313 )
313 )
314 coreconfigitem('convert', 'svn.branches',
314 coreconfigitem('convert', 'svn.branches',
315 default=None,
315 default=None,
316 )
316 )
317 coreconfigitem('convert', 'svn.startrev',
317 coreconfigitem('convert', 'svn.startrev',
318 default=0,
318 default=0,
319 )
319 )
320 coreconfigitem('debug', 'dirstate.delaywrite',
320 coreconfigitem('debug', 'dirstate.delaywrite',
321 default=0,
321 default=0,
322 )
322 )
323 coreconfigitem('defaults', '.*',
323 coreconfigitem('defaults', '.*',
324 default=None,
324 default=None,
325 generic=True,
325 generic=True,
326 )
326 )
327 coreconfigitem('devel', 'all-warnings',
327 coreconfigitem('devel', 'all-warnings',
328 default=False,
328 default=False,
329 )
329 )
330 coreconfigitem('devel', 'bundle2.debug',
330 coreconfigitem('devel', 'bundle2.debug',
331 default=False,
331 default=False,
332 )
332 )
333 coreconfigitem('devel', 'cache-vfs',
333 coreconfigitem('devel', 'cache-vfs',
334 default=None,
334 default=None,
335 )
335 )
336 coreconfigitem('devel', 'check-locks',
336 coreconfigitem('devel', 'check-locks',
337 default=False,
337 default=False,
338 )
338 )
339 coreconfigitem('devel', 'check-relroot',
339 coreconfigitem('devel', 'check-relroot',
340 default=False,
340 default=False,
341 )
341 )
342 coreconfigitem('devel', 'default-date',
342 coreconfigitem('devel', 'default-date',
343 default=None,
343 default=None,
344 )
344 )
345 coreconfigitem('devel', 'deprec-warn',
345 coreconfigitem('devel', 'deprec-warn',
346 default=False,
346 default=False,
347 )
347 )
348 coreconfigitem('devel', 'disableloaddefaultcerts',
348 coreconfigitem('devel', 'disableloaddefaultcerts',
349 default=False,
349 default=False,
350 )
350 )
351 coreconfigitem('devel', 'warn-empty-changegroup',
351 coreconfigitem('devel', 'warn-empty-changegroup',
352 default=False,
352 default=False,
353 )
353 )
354 coreconfigitem('devel', 'legacy.exchange',
354 coreconfigitem('devel', 'legacy.exchange',
355 default=list,
355 default=list,
356 )
356 )
357 coreconfigitem('devel', 'servercafile',
357 coreconfigitem('devel', 'servercafile',
358 default='',
358 default='',
359 )
359 )
360 coreconfigitem('devel', 'serverexactprotocol',
360 coreconfigitem('devel', 'serverexactprotocol',
361 default='',
361 default='',
362 )
362 )
363 coreconfigitem('devel', 'serverrequirecert',
363 coreconfigitem('devel', 'serverrequirecert',
364 default=False,
364 default=False,
365 )
365 )
366 coreconfigitem('devel', 'strip-obsmarkers',
366 coreconfigitem('devel', 'strip-obsmarkers',
367 default=True,
367 default=True,
368 )
368 )
369 coreconfigitem('devel', 'warn-config',
369 coreconfigitem('devel', 'warn-config',
370 default=None,
370 default=None,
371 )
371 )
372 coreconfigitem('devel', 'warn-config-default',
372 coreconfigitem('devel', 'warn-config-default',
373 default=None,
373 default=None,
374 )
374 )
375 coreconfigitem('devel', 'user.obsmarker',
375 coreconfigitem('devel', 'user.obsmarker',
376 default=None,
376 default=None,
377 )
377 )
378 coreconfigitem('devel', 'warn-config-unknown',
378 coreconfigitem('devel', 'warn-config-unknown',
379 default=None,
379 default=None,
380 )
380 )
381 coreconfigitem('devel', 'debug.extensions',
381 coreconfigitem('devel', 'debug.extensions',
382 default=False,
382 default=False,
383 )
383 )
384 coreconfigitem('devel', 'debug.peer-request',
384 coreconfigitem('devel', 'debug.peer-request',
385 default=False,
385 default=False,
386 )
386 )
387 coreconfigitem('diff', 'nodates',
387 coreconfigitem('diff', 'nodates',
388 default=False,
388 default=False,
389 )
389 )
390 coreconfigitem('diff', 'showfunc',
390 coreconfigitem('diff', 'showfunc',
391 default=False,
391 default=False,
392 )
392 )
393 coreconfigitem('diff', 'unified',
393 coreconfigitem('diff', 'unified',
394 default=None,
394 default=None,
395 )
395 )
396 coreconfigitem('diff', 'git',
396 coreconfigitem('diff', 'git',
397 default=False,
397 default=False,
398 )
398 )
399 coreconfigitem('diff', 'ignorews',
399 coreconfigitem('diff', 'ignorews',
400 default=False,
400 default=False,
401 )
401 )
402 coreconfigitem('diff', 'ignorewsamount',
402 coreconfigitem('diff', 'ignorewsamount',
403 default=False,
403 default=False,
404 )
404 )
405 coreconfigitem('diff', 'ignoreblanklines',
405 coreconfigitem('diff', 'ignoreblanklines',
406 default=False,
406 default=False,
407 )
407 )
408 coreconfigitem('diff', 'ignorewseol',
408 coreconfigitem('diff', 'ignorewseol',
409 default=False,
409 default=False,
410 )
410 )
411 coreconfigitem('diff', 'nobinary',
411 coreconfigitem('diff', 'nobinary',
412 default=False,
412 default=False,
413 )
413 )
414 coreconfigitem('diff', 'noprefix',
414 coreconfigitem('diff', 'noprefix',
415 default=False,
415 default=False,
416 )
416 )
417 coreconfigitem('diff', 'word-diff',
417 coreconfigitem('diff', 'word-diff',
418 default=False,
418 default=False,
419 )
419 )
420 coreconfigitem('email', 'bcc',
420 coreconfigitem('email', 'bcc',
421 default=None,
421 default=None,
422 )
422 )
423 coreconfigitem('email', 'cc',
423 coreconfigitem('email', 'cc',
424 default=None,
424 default=None,
425 )
425 )
426 coreconfigitem('email', 'charsets',
426 coreconfigitem('email', 'charsets',
427 default=list,
427 default=list,
428 )
428 )
429 coreconfigitem('email', 'from',
429 coreconfigitem('email', 'from',
430 default=None,
430 default=None,
431 )
431 )
432 coreconfigitem('email', 'method',
432 coreconfigitem('email', 'method',
433 default='smtp',
433 default='smtp',
434 )
434 )
435 coreconfigitem('email', 'reply-to',
435 coreconfigitem('email', 'reply-to',
436 default=None,
436 default=None,
437 )
437 )
438 coreconfigitem('email', 'to',
438 coreconfigitem('email', 'to',
439 default=None,
439 default=None,
440 )
440 )
441 coreconfigitem('experimental', 'archivemetatemplate',
441 coreconfigitem('experimental', 'archivemetatemplate',
442 default=dynamicdefault,
442 default=dynamicdefault,
443 )
443 )
444 coreconfigitem('experimental', 'bundle-phases',
444 coreconfigitem('experimental', 'bundle-phases',
445 default=False,
445 default=False,
446 )
446 )
447 coreconfigitem('experimental', 'bundle2-advertise',
447 coreconfigitem('experimental', 'bundle2-advertise',
448 default=True,
448 default=True,
449 )
449 )
450 coreconfigitem('experimental', 'bundle2-output-capture',
450 coreconfigitem('experimental', 'bundle2-output-capture',
451 default=False,
451 default=False,
452 )
452 )
453 coreconfigitem('experimental', 'bundle2.pushback',
453 coreconfigitem('experimental', 'bundle2.pushback',
454 default=False,
454 default=False,
455 )
455 )
456 coreconfigitem('experimental', 'bundle2.stream',
456 coreconfigitem('experimental', 'bundle2.stream',
457 default=False,
457 default=False,
458 )
458 )
459 coreconfigitem('experimental', 'bundle2lazylocking',
459 coreconfigitem('experimental', 'bundle2lazylocking',
460 default=False,
460 default=False,
461 )
461 )
462 coreconfigitem('experimental', 'bundlecomplevel',
462 coreconfigitem('experimental', 'bundlecomplevel',
463 default=None,
463 default=None,
464 )
464 )
465 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
465 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
466 default=None,
466 default=None,
467 )
467 )
468 coreconfigitem('experimental', 'bundlecomplevel.gzip',
468 coreconfigitem('experimental', 'bundlecomplevel.gzip',
469 default=None,
469 default=None,
470 )
470 )
471 coreconfigitem('experimental', 'bundlecomplevel.none',
471 coreconfigitem('experimental', 'bundlecomplevel.none',
472 default=None,
472 default=None,
473 )
473 )
474 coreconfigitem('experimental', 'bundlecomplevel.zstd',
474 coreconfigitem('experimental', 'bundlecomplevel.zstd',
475 default=None,
475 default=None,
476 )
476 )
477 coreconfigitem('experimental', 'changegroup3',
477 coreconfigitem('experimental', 'changegroup3',
478 default=False,
478 default=False,
479 )
479 )
480 coreconfigitem('experimental', 'clientcompressionengines',
480 coreconfigitem('experimental', 'clientcompressionengines',
481 default=list,
481 default=list,
482 )
482 )
483 coreconfigitem('experimental', 'copytrace',
483 coreconfigitem('experimental', 'copytrace',
484 default='on',
484 default='on',
485 )
485 )
486 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
486 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
487 default=100,
487 default=100,
488 )
488 )
489 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
489 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
490 default=100,
490 default=100,
491 )
491 )
492 coreconfigitem('experimental', 'crecordtest',
492 coreconfigitem('experimental', 'crecordtest',
493 default=None,
493 default=None,
494 )
494 )
495 coreconfigitem('experimental', 'directaccess',
495 coreconfigitem('experimental', 'directaccess',
496 default=False,
496 default=False,
497 )
497 )
498 coreconfigitem('experimental', 'directaccess.revnums',
498 coreconfigitem('experimental', 'directaccess.revnums',
499 default=False,
499 default=False,
500 )
500 )
501 coreconfigitem('experimental', 'editortmpinhg',
501 coreconfigitem('experimental', 'editortmpinhg',
502 default=False,
502 default=False,
503 )
503 )
504 coreconfigitem('experimental', 'evolution',
504 coreconfigitem('experimental', 'evolution',
505 default=list,
505 default=list,
506 )
506 )
507 coreconfigitem('experimental', 'evolution.allowdivergence',
507 coreconfigitem('experimental', 'evolution.allowdivergence',
508 default=False,
508 default=False,
509 alias=[('experimental', 'allowdivergence')]
509 alias=[('experimental', 'allowdivergence')]
510 )
510 )
511 coreconfigitem('experimental', 'evolution.allowunstable',
511 coreconfigitem('experimental', 'evolution.allowunstable',
512 default=None,
512 default=None,
513 )
513 )
514 coreconfigitem('experimental', 'evolution.createmarkers',
514 coreconfigitem('experimental', 'evolution.createmarkers',
515 default=None,
515 default=None,
516 )
516 )
517 coreconfigitem('experimental', 'evolution.effect-flags',
517 coreconfigitem('experimental', 'evolution.effect-flags',
518 default=True,
518 default=True,
519 alias=[('experimental', 'effect-flags')]
519 alias=[('experimental', 'effect-flags')]
520 )
520 )
521 coreconfigitem('experimental', 'evolution.exchange',
521 coreconfigitem('experimental', 'evolution.exchange',
522 default=None,
522 default=None,
523 )
523 )
524 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
524 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
525 default=False,
525 default=False,
526 )
526 )
527 coreconfigitem('experimental', 'evolution.report-instabilities',
527 coreconfigitem('experimental', 'evolution.report-instabilities',
528 default=True,
528 default=True,
529 )
529 )
530 coreconfigitem('experimental', 'evolution.track-operation',
530 coreconfigitem('experimental', 'evolution.track-operation',
531 default=True,
531 default=True,
532 )
532 )
533 coreconfigitem('experimental', 'maxdeltachainspan',
533 coreconfigitem('experimental', 'maxdeltachainspan',
534 default=-1,
534 default=-1,
535 )
535 )
536 coreconfigitem('experimental', 'mergetempdirprefix',
536 coreconfigitem('experimental', 'mergetempdirprefix',
537 default=None,
537 default=None,
538 )
538 )
539 coreconfigitem('experimental', 'mmapindexthreshold',
539 coreconfigitem('experimental', 'mmapindexthreshold',
540 default=None,
540 default=None,
541 )
541 )
542 coreconfigitem('experimental', 'nonnormalparanoidcheck',
542 coreconfigitem('experimental', 'nonnormalparanoidcheck',
543 default=False,
543 default=False,
544 )
544 )
545 coreconfigitem('experimental', 'exportableenviron',
545 coreconfigitem('experimental', 'exportableenviron',
546 default=list,
546 default=list,
547 )
547 )
548 coreconfigitem('experimental', 'extendedheader.index',
548 coreconfigitem('experimental', 'extendedheader.index',
549 default=None,
549 default=None,
550 )
550 )
551 coreconfigitem('experimental', 'extendedheader.similarity',
551 coreconfigitem('experimental', 'extendedheader.similarity',
552 default=False,
552 default=False,
553 )
553 )
554 coreconfigitem('experimental', 'format.compression',
554 coreconfigitem('experimental', 'format.compression',
555 default='zlib',
555 default='zlib',
556 )
556 )
557 coreconfigitem('experimental', 'graphshorten',
557 coreconfigitem('experimental', 'graphshorten',
558 default=False,
558 default=False,
559 )
559 )
560 coreconfigitem('experimental', 'graphstyle.parent',
560 coreconfigitem('experimental', 'graphstyle.parent',
561 default=dynamicdefault,
561 default=dynamicdefault,
562 )
562 )
563 coreconfigitem('experimental', 'graphstyle.missing',
563 coreconfigitem('experimental', 'graphstyle.missing',
564 default=dynamicdefault,
564 default=dynamicdefault,
565 )
565 )
566 coreconfigitem('experimental', 'graphstyle.grandparent',
566 coreconfigitem('experimental', 'graphstyle.grandparent',
567 default=dynamicdefault,
567 default=dynamicdefault,
568 )
568 )
569 coreconfigitem('experimental', 'hook-track-tags',
569 coreconfigitem('experimental', 'hook-track-tags',
570 default=False,
570 default=False,
571 )
571 )
572 coreconfigitem('experimental', 'httppeer.advertise-v2',
572 coreconfigitem('experimental', 'httppeer.advertise-v2',
573 default=False,
573 default=False,
574 )
574 )
575 coreconfigitem('experimental', 'httppostargs',
575 coreconfigitem('experimental', 'httppostargs',
576 default=False,
576 default=False,
577 )
577 )
578 coreconfigitem('experimental', 'mergedriver',
578 coreconfigitem('experimental', 'mergedriver',
579 default=None,
579 default=None,
580 )
580 )
581 coreconfigitem('experimental', 'nointerrupt', default=False)
581 coreconfigitem('experimental', 'nointerrupt', default=False)
582 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
582 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
583
583
584 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
584 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
585 default=False,
585 default=False,
586 )
586 )
587 coreconfigitem('experimental', 'remotenames',
587 coreconfigitem('experimental', 'remotenames',
588 default=False,
588 default=False,
589 )
589 )
590 coreconfigitem('experimental', 'removeemptydirs',
590 coreconfigitem('experimental', 'removeemptydirs',
591 default=True,
591 default=True,
592 )
592 )
593 coreconfigitem('experimental', 'revisions.prefixhexnode',
593 coreconfigitem('experimental', 'revisions.prefixhexnode',
594 default=False,
594 default=False,
595 )
595 )
596 coreconfigitem('experimental', 'revlogv2',
596 coreconfigitem('experimental', 'revlogv2',
597 default=None,
597 default=None,
598 )
598 )
599 coreconfigitem('experimental', 'revisions.disambiguatewithin',
599 coreconfigitem('experimental', 'revisions.disambiguatewithin',
600 default=None,
600 default=None,
601 )
601 )
602 coreconfigitem('experimental', 'single-head-per-branch',
602 coreconfigitem('experimental', 'single-head-per-branch',
603 default=False,
603 default=False,
604 )
604 )
605 coreconfigitem('experimental', 'sshserver.support-v2',
605 coreconfigitem('experimental', 'sshserver.support-v2',
606 default=False,
606 default=False,
607 )
607 )
608 coreconfigitem('experimental', 'spacemovesdown',
608 coreconfigitem('experimental', 'spacemovesdown',
609 default=False,
609 default=False,
610 )
610 )
611 coreconfigitem('experimental', 'sparse-read',
611 coreconfigitem('experimental', 'sparse-read',
612 default=False,
612 default=False,
613 )
613 )
614 coreconfigitem('experimental', 'sparse-read.density-threshold',
614 coreconfigitem('experimental', 'sparse-read.density-threshold',
615 default=0.50,
615 default=0.50,
616 )
616 )
617 coreconfigitem('experimental', 'sparse-read.min-gap-size',
617 coreconfigitem('experimental', 'sparse-read.min-gap-size',
618 default='65K',
618 default='65K',
619 )
619 )
620 coreconfigitem('experimental', 'treemanifest',
620 coreconfigitem('experimental', 'treemanifest',
621 default=False,
621 default=False,
622 )
622 )
623 coreconfigitem('experimental', 'update.atomic-file',
623 coreconfigitem('experimental', 'update.atomic-file',
624 default=False,
624 default=False,
625 )
625 )
626 coreconfigitem('experimental', 'sshpeer.advertise-v2',
626 coreconfigitem('experimental', 'sshpeer.advertise-v2',
627 default=False,
627 default=False,
628 )
628 )
629 coreconfigitem('experimental', 'web.apiserver',
629 coreconfigitem('experimental', 'web.apiserver',
630 default=False,
630 default=False,
631 )
631 )
632 coreconfigitem('experimental', 'web.api.http-v2',
632 coreconfigitem('experimental', 'web.api.http-v2',
633 default=False,
633 default=False,
634 )
634 )
635 coreconfigitem('experimental', 'web.api.debugreflect',
635 coreconfigitem('experimental', 'web.api.debugreflect',
636 default=False,
636 default=False,
637 )
637 )
638 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
638 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
639 default=False,
639 default=False,
640 )
640 )
641 coreconfigitem('experimental', 'xdiff',
641 coreconfigitem('experimental', 'xdiff',
642 default=False,
642 default=False,
643 )
643 )
644 coreconfigitem('extensions', '.*',
644 coreconfigitem('extensions', '.*',
645 default=None,
645 default=None,
646 generic=True,
646 generic=True,
647 )
647 )
648 coreconfigitem('extdata', '.*',
648 coreconfigitem('extdata', '.*',
649 default=None,
649 default=None,
650 generic=True,
650 generic=True,
651 )
651 )
652 coreconfigitem('format', 'chunkcachesize',
652 coreconfigitem('format', 'chunkcachesize',
653 default=None,
653 default=None,
654 )
654 )
655 coreconfigitem('format', 'dotencode',
655 coreconfigitem('format', 'dotencode',
656 default=True,
656 default=True,
657 )
657 )
658 coreconfigitem('format', 'generaldelta',
658 coreconfigitem('format', 'generaldelta',
659 default=False,
659 default=False,
660 )
660 )
661 coreconfigitem('format', 'manifestcachesize',
661 coreconfigitem('format', 'manifestcachesize',
662 default=None,
662 default=None,
663 )
663 )
664 coreconfigitem('format', 'maxchainlen',
664 coreconfigitem('format', 'maxchainlen',
665 default=None,
665 default=None,
666 )
666 )
667 coreconfigitem('format', 'obsstore-version',
667 coreconfigitem('format', 'obsstore-version',
668 default=None,
668 default=None,
669 )
669 )
670 coreconfigitem('format', 'sparse-revlog',
670 coreconfigitem('format', 'sparse-revlog',
671 default=False,
671 default=False,
672 )
672 )
673 coreconfigitem('format', 'usefncache',
673 coreconfigitem('format', 'usefncache',
674 default=True,
674 default=True,
675 )
675 )
676 coreconfigitem('format', 'usegeneraldelta',
676 coreconfigitem('format', 'usegeneraldelta',
677 default=True,
677 default=True,
678 )
678 )
679 coreconfigitem('format', 'usestore',
679 coreconfigitem('format', 'usestore',
680 default=True,
680 default=True,
681 )
681 )
682 coreconfigitem('format', 'internal-phase',
683 default=False,
684 )
682 coreconfigitem('fsmonitor', 'warn_when_unused',
685 coreconfigitem('fsmonitor', 'warn_when_unused',
683 default=True,
686 default=True,
684 )
687 )
685 coreconfigitem('fsmonitor', 'warn_update_file_count',
688 coreconfigitem('fsmonitor', 'warn_update_file_count',
686 default=50000,
689 default=50000,
687 )
690 )
688 coreconfigitem('hooks', '.*',
691 coreconfigitem('hooks', '.*',
689 default=dynamicdefault,
692 default=dynamicdefault,
690 generic=True,
693 generic=True,
691 )
694 )
692 coreconfigitem('hgweb-paths', '.*',
695 coreconfigitem('hgweb-paths', '.*',
693 default=list,
696 default=list,
694 generic=True,
697 generic=True,
695 )
698 )
696 coreconfigitem('hostfingerprints', '.*',
699 coreconfigitem('hostfingerprints', '.*',
697 default=list,
700 default=list,
698 generic=True,
701 generic=True,
699 )
702 )
700 coreconfigitem('hostsecurity', 'ciphers',
703 coreconfigitem('hostsecurity', 'ciphers',
701 default=None,
704 default=None,
702 )
705 )
703 coreconfigitem('hostsecurity', 'disabletls10warning',
706 coreconfigitem('hostsecurity', 'disabletls10warning',
704 default=False,
707 default=False,
705 )
708 )
706 coreconfigitem('hostsecurity', 'minimumprotocol',
709 coreconfigitem('hostsecurity', 'minimumprotocol',
707 default=dynamicdefault,
710 default=dynamicdefault,
708 )
711 )
709 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
712 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
710 default=dynamicdefault,
713 default=dynamicdefault,
711 generic=True,
714 generic=True,
712 )
715 )
713 coreconfigitem('hostsecurity', '.*:ciphers$',
716 coreconfigitem('hostsecurity', '.*:ciphers$',
714 default=dynamicdefault,
717 default=dynamicdefault,
715 generic=True,
718 generic=True,
716 )
719 )
717 coreconfigitem('hostsecurity', '.*:fingerprints$',
720 coreconfigitem('hostsecurity', '.*:fingerprints$',
718 default=list,
721 default=list,
719 generic=True,
722 generic=True,
720 )
723 )
721 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
724 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
722 default=None,
725 default=None,
723 generic=True,
726 generic=True,
724 )
727 )
725
728
726 coreconfigitem('http_proxy', 'always',
729 coreconfigitem('http_proxy', 'always',
727 default=False,
730 default=False,
728 )
731 )
729 coreconfigitem('http_proxy', 'host',
732 coreconfigitem('http_proxy', 'host',
730 default=None,
733 default=None,
731 )
734 )
732 coreconfigitem('http_proxy', 'no',
735 coreconfigitem('http_proxy', 'no',
733 default=list,
736 default=list,
734 )
737 )
735 coreconfigitem('http_proxy', 'passwd',
738 coreconfigitem('http_proxy', 'passwd',
736 default=None,
739 default=None,
737 )
740 )
738 coreconfigitem('http_proxy', 'user',
741 coreconfigitem('http_proxy', 'user',
739 default=None,
742 default=None,
740 )
743 )
741 coreconfigitem('logtoprocess', 'commandexception',
744 coreconfigitem('logtoprocess', 'commandexception',
742 default=None,
745 default=None,
743 )
746 )
744 coreconfigitem('logtoprocess', 'commandfinish',
747 coreconfigitem('logtoprocess', 'commandfinish',
745 default=None,
748 default=None,
746 )
749 )
747 coreconfigitem('logtoprocess', 'command',
750 coreconfigitem('logtoprocess', 'command',
748 default=None,
751 default=None,
749 )
752 )
750 coreconfigitem('logtoprocess', 'develwarn',
753 coreconfigitem('logtoprocess', 'develwarn',
751 default=None,
754 default=None,
752 )
755 )
753 coreconfigitem('logtoprocess', 'uiblocked',
756 coreconfigitem('logtoprocess', 'uiblocked',
754 default=None,
757 default=None,
755 )
758 )
756 coreconfigitem('merge', 'checkunknown',
759 coreconfigitem('merge', 'checkunknown',
757 default='abort',
760 default='abort',
758 )
761 )
759 coreconfigitem('merge', 'checkignored',
762 coreconfigitem('merge', 'checkignored',
760 default='abort',
763 default='abort',
761 )
764 )
762 coreconfigitem('experimental', 'merge.checkpathconflicts',
765 coreconfigitem('experimental', 'merge.checkpathconflicts',
763 default=False,
766 default=False,
764 )
767 )
765 coreconfigitem('merge', 'followcopies',
768 coreconfigitem('merge', 'followcopies',
766 default=True,
769 default=True,
767 )
770 )
768 coreconfigitem('merge', 'on-failure',
771 coreconfigitem('merge', 'on-failure',
769 default='continue',
772 default='continue',
770 )
773 )
771 coreconfigitem('merge', 'preferancestor',
774 coreconfigitem('merge', 'preferancestor',
772 default=lambda: ['*'],
775 default=lambda: ['*'],
773 )
776 )
774 coreconfigitem('merge', 'strict-capability-check',
777 coreconfigitem('merge', 'strict-capability-check',
775 default=False,
778 default=False,
776 )
779 )
777 coreconfigitem('merge-tools', '.*',
780 coreconfigitem('merge-tools', '.*',
778 default=None,
781 default=None,
779 generic=True,
782 generic=True,
780 )
783 )
781 coreconfigitem('merge-tools', br'.*\.args$',
784 coreconfigitem('merge-tools', br'.*\.args$',
782 default="$local $base $other",
785 default="$local $base $other",
783 generic=True,
786 generic=True,
784 priority=-1,
787 priority=-1,
785 )
788 )
786 coreconfigitem('merge-tools', br'.*\.binary$',
789 coreconfigitem('merge-tools', br'.*\.binary$',
787 default=False,
790 default=False,
788 generic=True,
791 generic=True,
789 priority=-1,
792 priority=-1,
790 )
793 )
791 coreconfigitem('merge-tools', br'.*\.check$',
794 coreconfigitem('merge-tools', br'.*\.check$',
792 default=list,
795 default=list,
793 generic=True,
796 generic=True,
794 priority=-1,
797 priority=-1,
795 )
798 )
796 coreconfigitem('merge-tools', br'.*\.checkchanged$',
799 coreconfigitem('merge-tools', br'.*\.checkchanged$',
797 default=False,
800 default=False,
798 generic=True,
801 generic=True,
799 priority=-1,
802 priority=-1,
800 )
803 )
801 coreconfigitem('merge-tools', br'.*\.executable$',
804 coreconfigitem('merge-tools', br'.*\.executable$',
802 default=dynamicdefault,
805 default=dynamicdefault,
803 generic=True,
806 generic=True,
804 priority=-1,
807 priority=-1,
805 )
808 )
806 coreconfigitem('merge-tools', br'.*\.fixeol$',
809 coreconfigitem('merge-tools', br'.*\.fixeol$',
807 default=False,
810 default=False,
808 generic=True,
811 generic=True,
809 priority=-1,
812 priority=-1,
810 )
813 )
811 coreconfigitem('merge-tools', br'.*\.gui$',
814 coreconfigitem('merge-tools', br'.*\.gui$',
812 default=False,
815 default=False,
813 generic=True,
816 generic=True,
814 priority=-1,
817 priority=-1,
815 )
818 )
816 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
819 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
817 default='basic',
820 default='basic',
818 generic=True,
821 generic=True,
819 priority=-1,
822 priority=-1,
820 )
823 )
821 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
824 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
822 default=dynamicdefault, # take from ui.mergemarkertemplate
825 default=dynamicdefault, # take from ui.mergemarkertemplate
823 generic=True,
826 generic=True,
824 priority=-1,
827 priority=-1,
825 )
828 )
826 coreconfigitem('merge-tools', br'.*\.priority$',
829 coreconfigitem('merge-tools', br'.*\.priority$',
827 default=0,
830 default=0,
828 generic=True,
831 generic=True,
829 priority=-1,
832 priority=-1,
830 )
833 )
831 coreconfigitem('merge-tools', br'.*\.premerge$',
834 coreconfigitem('merge-tools', br'.*\.premerge$',
832 default=dynamicdefault,
835 default=dynamicdefault,
833 generic=True,
836 generic=True,
834 priority=-1,
837 priority=-1,
835 )
838 )
836 coreconfigitem('merge-tools', br'.*\.symlink$',
839 coreconfigitem('merge-tools', br'.*\.symlink$',
837 default=False,
840 default=False,
838 generic=True,
841 generic=True,
839 priority=-1,
842 priority=-1,
840 )
843 )
841 coreconfigitem('pager', 'attend-.*',
844 coreconfigitem('pager', 'attend-.*',
842 default=dynamicdefault,
845 default=dynamicdefault,
843 generic=True,
846 generic=True,
844 )
847 )
845 coreconfigitem('pager', 'ignore',
848 coreconfigitem('pager', 'ignore',
846 default=list,
849 default=list,
847 )
850 )
848 coreconfigitem('pager', 'pager',
851 coreconfigitem('pager', 'pager',
849 default=dynamicdefault,
852 default=dynamicdefault,
850 )
853 )
851 coreconfigitem('patch', 'eol',
854 coreconfigitem('patch', 'eol',
852 default='strict',
855 default='strict',
853 )
856 )
854 coreconfigitem('patch', 'fuzz',
857 coreconfigitem('patch', 'fuzz',
855 default=2,
858 default=2,
856 )
859 )
857 coreconfigitem('paths', 'default',
860 coreconfigitem('paths', 'default',
858 default=None,
861 default=None,
859 )
862 )
860 coreconfigitem('paths', 'default-push',
863 coreconfigitem('paths', 'default-push',
861 default=None,
864 default=None,
862 )
865 )
863 coreconfigitem('paths', '.*',
866 coreconfigitem('paths', '.*',
864 default=None,
867 default=None,
865 generic=True,
868 generic=True,
866 )
869 )
867 coreconfigitem('phases', 'checksubrepos',
870 coreconfigitem('phases', 'checksubrepos',
868 default='follow',
871 default='follow',
869 )
872 )
870 coreconfigitem('phases', 'new-commit',
873 coreconfigitem('phases', 'new-commit',
871 default='draft',
874 default='draft',
872 )
875 )
873 coreconfigitem('phases', 'publish',
876 coreconfigitem('phases', 'publish',
874 default=True,
877 default=True,
875 )
878 )
876 coreconfigitem('profiling', 'enabled',
879 coreconfigitem('profiling', 'enabled',
877 default=False,
880 default=False,
878 )
881 )
879 coreconfigitem('profiling', 'format',
882 coreconfigitem('profiling', 'format',
880 default='text',
883 default='text',
881 )
884 )
882 coreconfigitem('profiling', 'freq',
885 coreconfigitem('profiling', 'freq',
883 default=1000,
886 default=1000,
884 )
887 )
885 coreconfigitem('profiling', 'limit',
888 coreconfigitem('profiling', 'limit',
886 default=30,
889 default=30,
887 )
890 )
888 coreconfigitem('profiling', 'nested',
891 coreconfigitem('profiling', 'nested',
889 default=0,
892 default=0,
890 )
893 )
891 coreconfigitem('profiling', 'output',
894 coreconfigitem('profiling', 'output',
892 default=None,
895 default=None,
893 )
896 )
894 coreconfigitem('profiling', 'showmax',
897 coreconfigitem('profiling', 'showmax',
895 default=0.999,
898 default=0.999,
896 )
899 )
897 coreconfigitem('profiling', 'showmin',
900 coreconfigitem('profiling', 'showmin',
898 default=dynamicdefault,
901 default=dynamicdefault,
899 )
902 )
900 coreconfigitem('profiling', 'sort',
903 coreconfigitem('profiling', 'sort',
901 default='inlinetime',
904 default='inlinetime',
902 )
905 )
903 coreconfigitem('profiling', 'statformat',
906 coreconfigitem('profiling', 'statformat',
904 default='hotpath',
907 default='hotpath',
905 )
908 )
906 coreconfigitem('profiling', 'time-track',
909 coreconfigitem('profiling', 'time-track',
907 default='cpu',
910 default='cpu',
908 )
911 )
909 coreconfigitem('profiling', 'type',
912 coreconfigitem('profiling', 'type',
910 default='stat',
913 default='stat',
911 )
914 )
912 coreconfigitem('progress', 'assume-tty',
915 coreconfigitem('progress', 'assume-tty',
913 default=False,
916 default=False,
914 )
917 )
915 coreconfigitem('progress', 'changedelay',
918 coreconfigitem('progress', 'changedelay',
916 default=1,
919 default=1,
917 )
920 )
918 coreconfigitem('progress', 'clear-complete',
921 coreconfigitem('progress', 'clear-complete',
919 default=True,
922 default=True,
920 )
923 )
921 coreconfigitem('progress', 'debug',
924 coreconfigitem('progress', 'debug',
922 default=False,
925 default=False,
923 )
926 )
924 coreconfigitem('progress', 'delay',
927 coreconfigitem('progress', 'delay',
925 default=3,
928 default=3,
926 )
929 )
927 coreconfigitem('progress', 'disable',
930 coreconfigitem('progress', 'disable',
928 default=False,
931 default=False,
929 )
932 )
930 coreconfigitem('progress', 'estimateinterval',
933 coreconfigitem('progress', 'estimateinterval',
931 default=60.0,
934 default=60.0,
932 )
935 )
933 coreconfigitem('progress', 'format',
936 coreconfigitem('progress', 'format',
934 default=lambda: ['topic', 'bar', 'number', 'estimate'],
937 default=lambda: ['topic', 'bar', 'number', 'estimate'],
935 )
938 )
936 coreconfigitem('progress', 'refresh',
939 coreconfigitem('progress', 'refresh',
937 default=0.1,
940 default=0.1,
938 )
941 )
939 coreconfigitem('progress', 'width',
942 coreconfigitem('progress', 'width',
940 default=dynamicdefault,
943 default=dynamicdefault,
941 )
944 )
942 coreconfigitem('push', 'pushvars.server',
945 coreconfigitem('push', 'pushvars.server',
943 default=False,
946 default=False,
944 )
947 )
945 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
948 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
946 default=True,
949 default=True,
947 alias=[('format', 'aggressivemergedeltas')],
950 alias=[('format', 'aggressivemergedeltas')],
948 )
951 )
949 coreconfigitem('server', 'bookmarks-pushkey-compat',
952 coreconfigitem('server', 'bookmarks-pushkey-compat',
950 default=True,
953 default=True,
951 )
954 )
952 coreconfigitem('server', 'bundle1',
955 coreconfigitem('server', 'bundle1',
953 default=True,
956 default=True,
954 )
957 )
955 coreconfigitem('server', 'bundle1gd',
958 coreconfigitem('server', 'bundle1gd',
956 default=None,
959 default=None,
957 )
960 )
958 coreconfigitem('server', 'bundle1.pull',
961 coreconfigitem('server', 'bundle1.pull',
959 default=None,
962 default=None,
960 )
963 )
961 coreconfigitem('server', 'bundle1gd.pull',
964 coreconfigitem('server', 'bundle1gd.pull',
962 default=None,
965 default=None,
963 )
966 )
964 coreconfigitem('server', 'bundle1.push',
967 coreconfigitem('server', 'bundle1.push',
965 default=None,
968 default=None,
966 )
969 )
967 coreconfigitem('server', 'bundle1gd.push',
970 coreconfigitem('server', 'bundle1gd.push',
968 default=None,
971 default=None,
969 )
972 )
970 coreconfigitem('server', 'compressionengines',
973 coreconfigitem('server', 'compressionengines',
971 default=list,
974 default=list,
972 )
975 )
973 coreconfigitem('server', 'concurrent-push-mode',
976 coreconfigitem('server', 'concurrent-push-mode',
974 default='strict',
977 default='strict',
975 )
978 )
976 coreconfigitem('server', 'disablefullbundle',
979 coreconfigitem('server', 'disablefullbundle',
977 default=False,
980 default=False,
978 )
981 )
979 coreconfigitem('server', 'maxhttpheaderlen',
982 coreconfigitem('server', 'maxhttpheaderlen',
980 default=1024,
983 default=1024,
981 )
984 )
982 coreconfigitem('server', 'pullbundle',
985 coreconfigitem('server', 'pullbundle',
983 default=False,
986 default=False,
984 )
987 )
985 coreconfigitem('server', 'preferuncompressed',
988 coreconfigitem('server', 'preferuncompressed',
986 default=False,
989 default=False,
987 )
990 )
988 coreconfigitem('server', 'streamunbundle',
991 coreconfigitem('server', 'streamunbundle',
989 default=False,
992 default=False,
990 )
993 )
991 coreconfigitem('server', 'uncompressed',
994 coreconfigitem('server', 'uncompressed',
992 default=True,
995 default=True,
993 )
996 )
994 coreconfigitem('server', 'uncompressedallowsecret',
997 coreconfigitem('server', 'uncompressedallowsecret',
995 default=False,
998 default=False,
996 )
999 )
997 coreconfigitem('server', 'validate',
1000 coreconfigitem('server', 'validate',
998 default=False,
1001 default=False,
999 )
1002 )
1000 coreconfigitem('server', 'zliblevel',
1003 coreconfigitem('server', 'zliblevel',
1001 default=-1,
1004 default=-1,
1002 )
1005 )
1003 coreconfigitem('server', 'zstdlevel',
1006 coreconfigitem('server', 'zstdlevel',
1004 default=3,
1007 default=3,
1005 )
1008 )
1006 coreconfigitem('share', 'pool',
1009 coreconfigitem('share', 'pool',
1007 default=None,
1010 default=None,
1008 )
1011 )
1009 coreconfigitem('share', 'poolnaming',
1012 coreconfigitem('share', 'poolnaming',
1010 default='identity',
1013 default='identity',
1011 )
1014 )
1012 coreconfigitem('smtp', 'host',
1015 coreconfigitem('smtp', 'host',
1013 default=None,
1016 default=None,
1014 )
1017 )
1015 coreconfigitem('smtp', 'local_hostname',
1018 coreconfigitem('smtp', 'local_hostname',
1016 default=None,
1019 default=None,
1017 )
1020 )
1018 coreconfigitem('smtp', 'password',
1021 coreconfigitem('smtp', 'password',
1019 default=None,
1022 default=None,
1020 )
1023 )
1021 coreconfigitem('smtp', 'port',
1024 coreconfigitem('smtp', 'port',
1022 default=dynamicdefault,
1025 default=dynamicdefault,
1023 )
1026 )
1024 coreconfigitem('smtp', 'tls',
1027 coreconfigitem('smtp', 'tls',
1025 default='none',
1028 default='none',
1026 )
1029 )
1027 coreconfigitem('smtp', 'username',
1030 coreconfigitem('smtp', 'username',
1028 default=None,
1031 default=None,
1029 )
1032 )
1030 coreconfigitem('sparse', 'missingwarning',
1033 coreconfigitem('sparse', 'missingwarning',
1031 default=True,
1034 default=True,
1032 )
1035 )
1033 coreconfigitem('subrepos', 'allowed',
1036 coreconfigitem('subrepos', 'allowed',
1034 default=dynamicdefault, # to make backporting simpler
1037 default=dynamicdefault, # to make backporting simpler
1035 )
1038 )
1036 coreconfigitem('subrepos', 'hg:allowed',
1039 coreconfigitem('subrepos', 'hg:allowed',
1037 default=dynamicdefault,
1040 default=dynamicdefault,
1038 )
1041 )
1039 coreconfigitem('subrepos', 'git:allowed',
1042 coreconfigitem('subrepos', 'git:allowed',
1040 default=dynamicdefault,
1043 default=dynamicdefault,
1041 )
1044 )
1042 coreconfigitem('subrepos', 'svn:allowed',
1045 coreconfigitem('subrepos', 'svn:allowed',
1043 default=dynamicdefault,
1046 default=dynamicdefault,
1044 )
1047 )
1045 coreconfigitem('templates', '.*',
1048 coreconfigitem('templates', '.*',
1046 default=None,
1049 default=None,
1047 generic=True,
1050 generic=True,
1048 )
1051 )
1049 coreconfigitem('trusted', 'groups',
1052 coreconfigitem('trusted', 'groups',
1050 default=list,
1053 default=list,
1051 )
1054 )
1052 coreconfigitem('trusted', 'users',
1055 coreconfigitem('trusted', 'users',
1053 default=list,
1056 default=list,
1054 )
1057 )
1055 coreconfigitem('ui', '_usedassubrepo',
1058 coreconfigitem('ui', '_usedassubrepo',
1056 default=False,
1059 default=False,
1057 )
1060 )
1058 coreconfigitem('ui', 'allowemptycommit',
1061 coreconfigitem('ui', 'allowemptycommit',
1059 default=False,
1062 default=False,
1060 )
1063 )
1061 coreconfigitem('ui', 'archivemeta',
1064 coreconfigitem('ui', 'archivemeta',
1062 default=True,
1065 default=True,
1063 )
1066 )
1064 coreconfigitem('ui', 'askusername',
1067 coreconfigitem('ui', 'askusername',
1065 default=False,
1068 default=False,
1066 )
1069 )
1067 coreconfigitem('ui', 'clonebundlefallback',
1070 coreconfigitem('ui', 'clonebundlefallback',
1068 default=False,
1071 default=False,
1069 )
1072 )
1070 coreconfigitem('ui', 'clonebundleprefers',
1073 coreconfigitem('ui', 'clonebundleprefers',
1071 default=list,
1074 default=list,
1072 )
1075 )
1073 coreconfigitem('ui', 'clonebundles',
1076 coreconfigitem('ui', 'clonebundles',
1074 default=True,
1077 default=True,
1075 )
1078 )
1076 coreconfigitem('ui', 'color',
1079 coreconfigitem('ui', 'color',
1077 default='auto',
1080 default='auto',
1078 )
1081 )
1079 coreconfigitem('ui', 'commitsubrepos',
1082 coreconfigitem('ui', 'commitsubrepos',
1080 default=False,
1083 default=False,
1081 )
1084 )
1082 coreconfigitem('ui', 'debug',
1085 coreconfigitem('ui', 'debug',
1083 default=False,
1086 default=False,
1084 )
1087 )
1085 coreconfigitem('ui', 'debugger',
1088 coreconfigitem('ui', 'debugger',
1086 default=None,
1089 default=None,
1087 )
1090 )
1088 coreconfigitem('ui', 'editor',
1091 coreconfigitem('ui', 'editor',
1089 default=dynamicdefault,
1092 default=dynamicdefault,
1090 )
1093 )
1091 coreconfigitem('ui', 'fallbackencoding',
1094 coreconfigitem('ui', 'fallbackencoding',
1092 default=None,
1095 default=None,
1093 )
1096 )
1094 coreconfigitem('ui', 'forcecwd',
1097 coreconfigitem('ui', 'forcecwd',
1095 default=None,
1098 default=None,
1096 )
1099 )
1097 coreconfigitem('ui', 'forcemerge',
1100 coreconfigitem('ui', 'forcemerge',
1098 default=None,
1101 default=None,
1099 )
1102 )
1100 coreconfigitem('ui', 'formatdebug',
1103 coreconfigitem('ui', 'formatdebug',
1101 default=False,
1104 default=False,
1102 )
1105 )
1103 coreconfigitem('ui', 'formatjson',
1106 coreconfigitem('ui', 'formatjson',
1104 default=False,
1107 default=False,
1105 )
1108 )
1106 coreconfigitem('ui', 'formatted',
1109 coreconfigitem('ui', 'formatted',
1107 default=None,
1110 default=None,
1108 )
1111 )
1109 coreconfigitem('ui', 'graphnodetemplate',
1112 coreconfigitem('ui', 'graphnodetemplate',
1110 default=None,
1113 default=None,
1111 )
1114 )
1112 coreconfigitem('ui', 'history-editing-backup',
1115 coreconfigitem('ui', 'history-editing-backup',
1113 default=True,
1116 default=True,
1114 )
1117 )
1115 coreconfigitem('ui', 'interactive',
1118 coreconfigitem('ui', 'interactive',
1116 default=None,
1119 default=None,
1117 )
1120 )
1118 coreconfigitem('ui', 'interface',
1121 coreconfigitem('ui', 'interface',
1119 default=None,
1122 default=None,
1120 )
1123 )
1121 coreconfigitem('ui', 'interface.chunkselector',
1124 coreconfigitem('ui', 'interface.chunkselector',
1122 default=None,
1125 default=None,
1123 )
1126 )
1124 coreconfigitem('ui', 'large-file-limit',
1127 coreconfigitem('ui', 'large-file-limit',
1125 default=10000000,
1128 default=10000000,
1126 )
1129 )
1127 coreconfigitem('ui', 'logblockedtimes',
1130 coreconfigitem('ui', 'logblockedtimes',
1128 default=False,
1131 default=False,
1129 )
1132 )
1130 coreconfigitem('ui', 'logtemplate',
1133 coreconfigitem('ui', 'logtemplate',
1131 default=None,
1134 default=None,
1132 )
1135 )
1133 coreconfigitem('ui', 'merge',
1136 coreconfigitem('ui', 'merge',
1134 default=None,
1137 default=None,
1135 )
1138 )
1136 coreconfigitem('ui', 'mergemarkers',
1139 coreconfigitem('ui', 'mergemarkers',
1137 default='basic',
1140 default='basic',
1138 )
1141 )
1139 coreconfigitem('ui', 'mergemarkertemplate',
1142 coreconfigitem('ui', 'mergemarkertemplate',
1140 default=('{node|short} '
1143 default=('{node|short} '
1141 '{ifeq(tags, "tip", "", '
1144 '{ifeq(tags, "tip", "", '
1142 'ifeq(tags, "", "", "{tags} "))}'
1145 'ifeq(tags, "", "", "{tags} "))}'
1143 '{if(bookmarks, "{bookmarks} ")}'
1146 '{if(bookmarks, "{bookmarks} ")}'
1144 '{ifeq(branch, "default", "", "{branch} ")}'
1147 '{ifeq(branch, "default", "", "{branch} ")}'
1145 '- {author|user}: {desc|firstline}')
1148 '- {author|user}: {desc|firstline}')
1146 )
1149 )
1147 coreconfigitem('ui', 'nontty',
1150 coreconfigitem('ui', 'nontty',
1148 default=False,
1151 default=False,
1149 )
1152 )
1150 coreconfigitem('ui', 'origbackuppath',
1153 coreconfigitem('ui', 'origbackuppath',
1151 default=None,
1154 default=None,
1152 )
1155 )
1153 coreconfigitem('ui', 'paginate',
1156 coreconfigitem('ui', 'paginate',
1154 default=True,
1157 default=True,
1155 )
1158 )
1156 coreconfigitem('ui', 'patch',
1159 coreconfigitem('ui', 'patch',
1157 default=None,
1160 default=None,
1158 )
1161 )
1159 coreconfigitem('ui', 'portablefilenames',
1162 coreconfigitem('ui', 'portablefilenames',
1160 default='warn',
1163 default='warn',
1161 )
1164 )
1162 coreconfigitem('ui', 'promptecho',
1165 coreconfigitem('ui', 'promptecho',
1163 default=False,
1166 default=False,
1164 )
1167 )
1165 coreconfigitem('ui', 'quiet',
1168 coreconfigitem('ui', 'quiet',
1166 default=False,
1169 default=False,
1167 )
1170 )
1168 coreconfigitem('ui', 'quietbookmarkmove',
1171 coreconfigitem('ui', 'quietbookmarkmove',
1169 default=False,
1172 default=False,
1170 )
1173 )
1171 coreconfigitem('ui', 'remotecmd',
1174 coreconfigitem('ui', 'remotecmd',
1172 default='hg',
1175 default='hg',
1173 )
1176 )
1174 coreconfigitem('ui', 'report_untrusted',
1177 coreconfigitem('ui', 'report_untrusted',
1175 default=True,
1178 default=True,
1176 )
1179 )
1177 coreconfigitem('ui', 'rollback',
1180 coreconfigitem('ui', 'rollback',
1178 default=True,
1181 default=True,
1179 )
1182 )
1180 coreconfigitem('ui', 'signal-safe-lock',
1183 coreconfigitem('ui', 'signal-safe-lock',
1181 default=True,
1184 default=True,
1182 )
1185 )
1183 coreconfigitem('ui', 'slash',
1186 coreconfigitem('ui', 'slash',
1184 default=False,
1187 default=False,
1185 )
1188 )
1186 coreconfigitem('ui', 'ssh',
1189 coreconfigitem('ui', 'ssh',
1187 default='ssh',
1190 default='ssh',
1188 )
1191 )
1189 coreconfigitem('ui', 'ssherrorhint',
1192 coreconfigitem('ui', 'ssherrorhint',
1190 default=None,
1193 default=None,
1191 )
1194 )
1192 coreconfigitem('ui', 'statuscopies',
1195 coreconfigitem('ui', 'statuscopies',
1193 default=False,
1196 default=False,
1194 )
1197 )
1195 coreconfigitem('ui', 'strict',
1198 coreconfigitem('ui', 'strict',
1196 default=False,
1199 default=False,
1197 )
1200 )
1198 coreconfigitem('ui', 'style',
1201 coreconfigitem('ui', 'style',
1199 default='',
1202 default='',
1200 )
1203 )
1201 coreconfigitem('ui', 'supportcontact',
1204 coreconfigitem('ui', 'supportcontact',
1202 default=None,
1205 default=None,
1203 )
1206 )
1204 coreconfigitem('ui', 'textwidth',
1207 coreconfigitem('ui', 'textwidth',
1205 default=78,
1208 default=78,
1206 )
1209 )
1207 coreconfigitem('ui', 'timeout',
1210 coreconfigitem('ui', 'timeout',
1208 default='600',
1211 default='600',
1209 )
1212 )
1210 coreconfigitem('ui', 'timeout.warn',
1213 coreconfigitem('ui', 'timeout.warn',
1211 default=0,
1214 default=0,
1212 )
1215 )
1213 coreconfigitem('ui', 'traceback',
1216 coreconfigitem('ui', 'traceback',
1214 default=False,
1217 default=False,
1215 )
1218 )
1216 coreconfigitem('ui', 'tweakdefaults',
1219 coreconfigitem('ui', 'tweakdefaults',
1217 default=False,
1220 default=False,
1218 )
1221 )
1219 coreconfigitem('ui', 'username',
1222 coreconfigitem('ui', 'username',
1220 alias=[('ui', 'user')]
1223 alias=[('ui', 'user')]
1221 )
1224 )
1222 coreconfigitem('ui', 'verbose',
1225 coreconfigitem('ui', 'verbose',
1223 default=False,
1226 default=False,
1224 )
1227 )
1225 coreconfigitem('verify', 'skipflags',
1228 coreconfigitem('verify', 'skipflags',
1226 default=None,
1229 default=None,
1227 )
1230 )
1228 coreconfigitem('web', 'allowbz2',
1231 coreconfigitem('web', 'allowbz2',
1229 default=False,
1232 default=False,
1230 )
1233 )
1231 coreconfigitem('web', 'allowgz',
1234 coreconfigitem('web', 'allowgz',
1232 default=False,
1235 default=False,
1233 )
1236 )
1234 coreconfigitem('web', 'allow-pull',
1237 coreconfigitem('web', 'allow-pull',
1235 alias=[('web', 'allowpull')],
1238 alias=[('web', 'allowpull')],
1236 default=True,
1239 default=True,
1237 )
1240 )
1238 coreconfigitem('web', 'allow-push',
1241 coreconfigitem('web', 'allow-push',
1239 alias=[('web', 'allow_push')],
1242 alias=[('web', 'allow_push')],
1240 default=list,
1243 default=list,
1241 )
1244 )
1242 coreconfigitem('web', 'allowzip',
1245 coreconfigitem('web', 'allowzip',
1243 default=False,
1246 default=False,
1244 )
1247 )
1245 coreconfigitem('web', 'archivesubrepos',
1248 coreconfigitem('web', 'archivesubrepos',
1246 default=False,
1249 default=False,
1247 )
1250 )
1248 coreconfigitem('web', 'cache',
1251 coreconfigitem('web', 'cache',
1249 default=True,
1252 default=True,
1250 )
1253 )
1251 coreconfigitem('web', 'contact',
1254 coreconfigitem('web', 'contact',
1252 default=None,
1255 default=None,
1253 )
1256 )
1254 coreconfigitem('web', 'deny_push',
1257 coreconfigitem('web', 'deny_push',
1255 default=list,
1258 default=list,
1256 )
1259 )
1257 coreconfigitem('web', 'guessmime',
1260 coreconfigitem('web', 'guessmime',
1258 default=False,
1261 default=False,
1259 )
1262 )
1260 coreconfigitem('web', 'hidden',
1263 coreconfigitem('web', 'hidden',
1261 default=False,
1264 default=False,
1262 )
1265 )
1263 coreconfigitem('web', 'labels',
1266 coreconfigitem('web', 'labels',
1264 default=list,
1267 default=list,
1265 )
1268 )
1266 coreconfigitem('web', 'logoimg',
1269 coreconfigitem('web', 'logoimg',
1267 default='hglogo.png',
1270 default='hglogo.png',
1268 )
1271 )
1269 coreconfigitem('web', 'logourl',
1272 coreconfigitem('web', 'logourl',
1270 default='https://mercurial-scm.org/',
1273 default='https://mercurial-scm.org/',
1271 )
1274 )
1272 coreconfigitem('web', 'accesslog',
1275 coreconfigitem('web', 'accesslog',
1273 default='-',
1276 default='-',
1274 )
1277 )
1275 coreconfigitem('web', 'address',
1278 coreconfigitem('web', 'address',
1276 default='',
1279 default='',
1277 )
1280 )
1278 coreconfigitem('web', 'allow-archive',
1281 coreconfigitem('web', 'allow-archive',
1279 alias=[('web', 'allow_archive')],
1282 alias=[('web', 'allow_archive')],
1280 default=list,
1283 default=list,
1281 )
1284 )
1282 coreconfigitem('web', 'allow_read',
1285 coreconfigitem('web', 'allow_read',
1283 default=list,
1286 default=list,
1284 )
1287 )
1285 coreconfigitem('web', 'baseurl',
1288 coreconfigitem('web', 'baseurl',
1286 default=None,
1289 default=None,
1287 )
1290 )
1288 coreconfigitem('web', 'cacerts',
1291 coreconfigitem('web', 'cacerts',
1289 default=None,
1292 default=None,
1290 )
1293 )
1291 coreconfigitem('web', 'certificate',
1294 coreconfigitem('web', 'certificate',
1292 default=None,
1295 default=None,
1293 )
1296 )
1294 coreconfigitem('web', 'collapse',
1297 coreconfigitem('web', 'collapse',
1295 default=False,
1298 default=False,
1296 )
1299 )
1297 coreconfigitem('web', 'csp',
1300 coreconfigitem('web', 'csp',
1298 default=None,
1301 default=None,
1299 )
1302 )
1300 coreconfigitem('web', 'deny_read',
1303 coreconfigitem('web', 'deny_read',
1301 default=list,
1304 default=list,
1302 )
1305 )
1303 coreconfigitem('web', 'descend',
1306 coreconfigitem('web', 'descend',
1304 default=True,
1307 default=True,
1305 )
1308 )
1306 coreconfigitem('web', 'description',
1309 coreconfigitem('web', 'description',
1307 default="",
1310 default="",
1308 )
1311 )
1309 coreconfigitem('web', 'encoding',
1312 coreconfigitem('web', 'encoding',
1310 default=lambda: encoding.encoding,
1313 default=lambda: encoding.encoding,
1311 )
1314 )
1312 coreconfigitem('web', 'errorlog',
1315 coreconfigitem('web', 'errorlog',
1313 default='-',
1316 default='-',
1314 )
1317 )
1315 coreconfigitem('web', 'ipv6',
1318 coreconfigitem('web', 'ipv6',
1316 default=False,
1319 default=False,
1317 )
1320 )
1318 coreconfigitem('web', 'maxchanges',
1321 coreconfigitem('web', 'maxchanges',
1319 default=10,
1322 default=10,
1320 )
1323 )
1321 coreconfigitem('web', 'maxfiles',
1324 coreconfigitem('web', 'maxfiles',
1322 default=10,
1325 default=10,
1323 )
1326 )
1324 coreconfigitem('web', 'maxshortchanges',
1327 coreconfigitem('web', 'maxshortchanges',
1325 default=60,
1328 default=60,
1326 )
1329 )
1327 coreconfigitem('web', 'motd',
1330 coreconfigitem('web', 'motd',
1328 default='',
1331 default='',
1329 )
1332 )
1330 coreconfigitem('web', 'name',
1333 coreconfigitem('web', 'name',
1331 default=dynamicdefault,
1334 default=dynamicdefault,
1332 )
1335 )
1333 coreconfigitem('web', 'port',
1336 coreconfigitem('web', 'port',
1334 default=8000,
1337 default=8000,
1335 )
1338 )
1336 coreconfigitem('web', 'prefix',
1339 coreconfigitem('web', 'prefix',
1337 default='',
1340 default='',
1338 )
1341 )
1339 coreconfigitem('web', 'push_ssl',
1342 coreconfigitem('web', 'push_ssl',
1340 default=True,
1343 default=True,
1341 )
1344 )
1342 coreconfigitem('web', 'refreshinterval',
1345 coreconfigitem('web', 'refreshinterval',
1343 default=20,
1346 default=20,
1344 )
1347 )
1345 coreconfigitem('web', 'server-header',
1348 coreconfigitem('web', 'server-header',
1346 default=None,
1349 default=None,
1347 )
1350 )
1348 coreconfigitem('web', 'staticurl',
1351 coreconfigitem('web', 'staticurl',
1349 default=None,
1352 default=None,
1350 )
1353 )
1351 coreconfigitem('web', 'stripes',
1354 coreconfigitem('web', 'stripes',
1352 default=1,
1355 default=1,
1353 )
1356 )
1354 coreconfigitem('web', 'style',
1357 coreconfigitem('web', 'style',
1355 default='paper',
1358 default='paper',
1356 )
1359 )
1357 coreconfigitem('web', 'templates',
1360 coreconfigitem('web', 'templates',
1358 default=None,
1361 default=None,
1359 )
1362 )
1360 coreconfigitem('web', 'view',
1363 coreconfigitem('web', 'view',
1361 default='served',
1364 default='served',
1362 )
1365 )
1363 coreconfigitem('worker', 'backgroundclose',
1366 coreconfigitem('worker', 'backgroundclose',
1364 default=dynamicdefault,
1367 default=dynamicdefault,
1365 )
1368 )
1366 # Windows defaults to a limit of 512 open files. A buffer of 128
1369 # Windows defaults to a limit of 512 open files. A buffer of 128
1367 # should give us enough headway.
1370 # should give us enough headway.
1368 coreconfigitem('worker', 'backgroundclosemaxqueue',
1371 coreconfigitem('worker', 'backgroundclosemaxqueue',
1369 default=384,
1372 default=384,
1370 )
1373 )
1371 coreconfigitem('worker', 'backgroundcloseminfilecount',
1374 coreconfigitem('worker', 'backgroundcloseminfilecount',
1372 default=2048,
1375 default=2048,
1373 )
1376 )
1374 coreconfigitem('worker', 'backgroundclosethreadcount',
1377 coreconfigitem('worker', 'backgroundclosethreadcount',
1375 default=4,
1378 default=4,
1376 )
1379 )
1377 coreconfigitem('worker', 'enabled',
1380 coreconfigitem('worker', 'enabled',
1378 default=True,
1381 default=True,
1379 )
1382 )
1380 coreconfigitem('worker', 'numcpus',
1383 coreconfigitem('worker', 'numcpus',
1381 default=None,
1384 default=None,
1382 )
1385 )
1383
1386
1384 # Rebase related configuration moved to core because other extension are doing
1387 # Rebase related configuration moved to core because other extension are doing
1385 # strange things. For example, shelve import the extensions to reuse some bit
1388 # strange things. For example, shelve import the extensions to reuse some bit
1386 # without formally loading it.
1389 # without formally loading it.
1387 coreconfigitem('commands', 'rebase.requiredest',
1390 coreconfigitem('commands', 'rebase.requiredest',
1388 default=False,
1391 default=False,
1389 )
1392 )
1390 coreconfigitem('experimental', 'rebaseskipobsolete',
1393 coreconfigitem('experimental', 'rebaseskipobsolete',
1391 default=True,
1394 default=True,
1392 )
1395 )
1393 coreconfigitem('rebase', 'singletransaction',
1396 coreconfigitem('rebase', 'singletransaction',
1394 default=False,
1397 default=False,
1395 )
1398 )
1396 coreconfigitem('rebase', 'experimental.inmemory',
1399 coreconfigitem('rebase', 'experimental.inmemory',
1397 default=False,
1400 default=False,
1398 )
1401 )
@@ -1,2431 +1,2435 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import random
13 import random
14 import sys
14 import sys
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 short,
22 short,
23 )
23 )
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 branchmap,
26 branchmap,
27 bundle2,
27 bundle2,
28 changegroup,
28 changegroup,
29 changelog,
29 changelog,
30 color,
30 color,
31 context,
31 context,
32 dirstate,
32 dirstate,
33 dirstateguard,
33 dirstateguard,
34 discovery,
34 discovery,
35 encoding,
35 encoding,
36 error,
36 error,
37 exchange,
37 exchange,
38 extensions,
38 extensions,
39 filelog,
39 filelog,
40 hook,
40 hook,
41 lock as lockmod,
41 lock as lockmod,
42 manifest,
42 manifest,
43 match as matchmod,
43 match as matchmod,
44 merge as mergemod,
44 merge as mergemod,
45 mergeutil,
45 mergeutil,
46 namespaces,
46 namespaces,
47 narrowspec,
47 narrowspec,
48 obsolete,
48 obsolete,
49 pathutil,
49 pathutil,
50 phases,
50 phases,
51 pushkey,
51 pushkey,
52 pycompat,
52 pycompat,
53 repository,
53 repository,
54 repoview,
54 repoview,
55 revset,
55 revset,
56 revsetlang,
56 revsetlang,
57 scmutil,
57 scmutil,
58 sparse,
58 sparse,
59 store,
59 store,
60 subrepoutil,
60 subrepoutil,
61 tags as tagsmod,
61 tags as tagsmod,
62 transaction,
62 transaction,
63 txnutil,
63 txnutil,
64 util,
64 util,
65 vfs as vfsmod,
65 vfs as vfsmod,
66 )
66 )
67 from .utils import (
67 from .utils import (
68 interfaceutil,
68 interfaceutil,
69 procutil,
69 procutil,
70 stringutil,
70 stringutil,
71 )
71 )
72
72
73 release = lockmod.release
73 release = lockmod.release
74 urlerr = util.urlerr
74 urlerr = util.urlerr
75 urlreq = util.urlreq
75 urlreq = util.urlreq
76
76
77 # set of (path, vfs-location) tuples. vfs-location is:
77 # set of (path, vfs-location) tuples. vfs-location is:
78 # - 'plain for vfs relative paths
78 # - 'plain for vfs relative paths
79 # - '' for svfs relative paths
79 # - '' for svfs relative paths
80 _cachedfiles = set()
80 _cachedfiles = set()
81
81
82 class _basefilecache(scmutil.filecache):
82 class _basefilecache(scmutil.filecache):
83 """All filecache usage on repo are done for logic that should be unfiltered
83 """All filecache usage on repo are done for logic that should be unfiltered
84 """
84 """
85 def __get__(self, repo, type=None):
85 def __get__(self, repo, type=None):
86 if repo is None:
86 if repo is None:
87 return self
87 return self
88 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
88 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
89 def __set__(self, repo, value):
89 def __set__(self, repo, value):
90 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
90 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
91 def __delete__(self, repo):
91 def __delete__(self, repo):
92 return super(_basefilecache, self).__delete__(repo.unfiltered())
92 return super(_basefilecache, self).__delete__(repo.unfiltered())
93
93
94 class repofilecache(_basefilecache):
94 class repofilecache(_basefilecache):
95 """filecache for files in .hg but outside of .hg/store"""
95 """filecache for files in .hg but outside of .hg/store"""
96 def __init__(self, *paths):
96 def __init__(self, *paths):
97 super(repofilecache, self).__init__(*paths)
97 super(repofilecache, self).__init__(*paths)
98 for path in paths:
98 for path in paths:
99 _cachedfiles.add((path, 'plain'))
99 _cachedfiles.add((path, 'plain'))
100
100
101 def join(self, obj, fname):
101 def join(self, obj, fname):
102 return obj.vfs.join(fname)
102 return obj.vfs.join(fname)
103
103
104 class storecache(_basefilecache):
104 class storecache(_basefilecache):
105 """filecache for files in the store"""
105 """filecache for files in the store"""
106 def __init__(self, *paths):
106 def __init__(self, *paths):
107 super(storecache, self).__init__(*paths)
107 super(storecache, self).__init__(*paths)
108 for path in paths:
108 for path in paths:
109 _cachedfiles.add((path, ''))
109 _cachedfiles.add((path, ''))
110
110
111 def join(self, obj, fname):
111 def join(self, obj, fname):
112 return obj.sjoin(fname)
112 return obj.sjoin(fname)
113
113
114 def isfilecached(repo, name):
114 def isfilecached(repo, name):
115 """check if a repo has already cached "name" filecache-ed property
115 """check if a repo has already cached "name" filecache-ed property
116
116
117 This returns (cachedobj-or-None, iscached) tuple.
117 This returns (cachedobj-or-None, iscached) tuple.
118 """
118 """
119 cacheentry = repo.unfiltered()._filecache.get(name, None)
119 cacheentry = repo.unfiltered()._filecache.get(name, None)
120 if not cacheentry:
120 if not cacheentry:
121 return None, False
121 return None, False
122 return cacheentry.obj, True
122 return cacheentry.obj, True
123
123
124 class unfilteredpropertycache(util.propertycache):
124 class unfilteredpropertycache(util.propertycache):
125 """propertycache that apply to unfiltered repo only"""
125 """propertycache that apply to unfiltered repo only"""
126
126
127 def __get__(self, repo, type=None):
127 def __get__(self, repo, type=None):
128 unfi = repo.unfiltered()
128 unfi = repo.unfiltered()
129 if unfi is repo:
129 if unfi is repo:
130 return super(unfilteredpropertycache, self).__get__(unfi)
130 return super(unfilteredpropertycache, self).__get__(unfi)
131 return getattr(unfi, self.name)
131 return getattr(unfi, self.name)
132
132
133 class filteredpropertycache(util.propertycache):
133 class filteredpropertycache(util.propertycache):
134 """propertycache that must take filtering in account"""
134 """propertycache that must take filtering in account"""
135
135
136 def cachevalue(self, obj, value):
136 def cachevalue(self, obj, value):
137 object.__setattr__(obj, self.name, value)
137 object.__setattr__(obj, self.name, value)
138
138
139
139
140 def hasunfilteredcache(repo, name):
140 def hasunfilteredcache(repo, name):
141 """check if a repo has an unfilteredpropertycache value for <name>"""
141 """check if a repo has an unfilteredpropertycache value for <name>"""
142 return name in vars(repo.unfiltered())
142 return name in vars(repo.unfiltered())
143
143
144 def unfilteredmethod(orig):
144 def unfilteredmethod(orig):
145 """decorate method that always need to be run on unfiltered version"""
145 """decorate method that always need to be run on unfiltered version"""
146 def wrapper(repo, *args, **kwargs):
146 def wrapper(repo, *args, **kwargs):
147 return orig(repo.unfiltered(), *args, **kwargs)
147 return orig(repo.unfiltered(), *args, **kwargs)
148 return wrapper
148 return wrapper
149
149
150 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
150 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
151 'unbundle'}
151 'unbundle'}
152 legacycaps = moderncaps.union({'changegroupsubset'})
152 legacycaps = moderncaps.union({'changegroupsubset'})
153
153
154 @interfaceutil.implementer(repository.ipeercommandexecutor)
154 @interfaceutil.implementer(repository.ipeercommandexecutor)
155 class localcommandexecutor(object):
155 class localcommandexecutor(object):
156 def __init__(self, peer):
156 def __init__(self, peer):
157 self._peer = peer
157 self._peer = peer
158 self._sent = False
158 self._sent = False
159 self._closed = False
159 self._closed = False
160
160
161 def __enter__(self):
161 def __enter__(self):
162 return self
162 return self
163
163
164 def __exit__(self, exctype, excvalue, exctb):
164 def __exit__(self, exctype, excvalue, exctb):
165 self.close()
165 self.close()
166
166
167 def callcommand(self, command, args):
167 def callcommand(self, command, args):
168 if self._sent:
168 if self._sent:
169 raise error.ProgrammingError('callcommand() cannot be used after '
169 raise error.ProgrammingError('callcommand() cannot be used after '
170 'sendcommands()')
170 'sendcommands()')
171
171
172 if self._closed:
172 if self._closed:
173 raise error.ProgrammingError('callcommand() cannot be used after '
173 raise error.ProgrammingError('callcommand() cannot be used after '
174 'close()')
174 'close()')
175
175
176 # We don't need to support anything fancy. Just call the named
176 # We don't need to support anything fancy. Just call the named
177 # method on the peer and return a resolved future.
177 # method on the peer and return a resolved future.
178 fn = getattr(self._peer, pycompat.sysstr(command))
178 fn = getattr(self._peer, pycompat.sysstr(command))
179
179
180 f = pycompat.futures.Future()
180 f = pycompat.futures.Future()
181
181
182 try:
182 try:
183 result = fn(**pycompat.strkwargs(args))
183 result = fn(**pycompat.strkwargs(args))
184 except Exception:
184 except Exception:
185 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
185 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
186 else:
186 else:
187 f.set_result(result)
187 f.set_result(result)
188
188
189 return f
189 return f
190
190
191 def sendcommands(self):
191 def sendcommands(self):
192 self._sent = True
192 self._sent = True
193
193
194 def close(self):
194 def close(self):
195 self._closed = True
195 self._closed = True
196
196
197 @interfaceutil.implementer(repository.ipeercommands)
197 @interfaceutil.implementer(repository.ipeercommands)
198 class localpeer(repository.peer):
198 class localpeer(repository.peer):
199 '''peer for a local repo; reflects only the most recent API'''
199 '''peer for a local repo; reflects only the most recent API'''
200
200
201 def __init__(self, repo, caps=None):
201 def __init__(self, repo, caps=None):
202 super(localpeer, self).__init__()
202 super(localpeer, self).__init__()
203
203
204 if caps is None:
204 if caps is None:
205 caps = moderncaps.copy()
205 caps = moderncaps.copy()
206 self._repo = repo.filtered('served')
206 self._repo = repo.filtered('served')
207 self.ui = repo.ui
207 self.ui = repo.ui
208 self._caps = repo._restrictcapabilities(caps)
208 self._caps = repo._restrictcapabilities(caps)
209
209
210 # Begin of _basepeer interface.
210 # Begin of _basepeer interface.
211
211
212 def url(self):
212 def url(self):
213 return self._repo.url()
213 return self._repo.url()
214
214
215 def local(self):
215 def local(self):
216 return self._repo
216 return self._repo
217
217
218 def peer(self):
218 def peer(self):
219 return self
219 return self
220
220
221 def canpush(self):
221 def canpush(self):
222 return True
222 return True
223
223
224 def close(self):
224 def close(self):
225 self._repo.close()
225 self._repo.close()
226
226
227 # End of _basepeer interface.
227 # End of _basepeer interface.
228
228
229 # Begin of _basewirecommands interface.
229 # Begin of _basewirecommands interface.
230
230
231 def branchmap(self):
231 def branchmap(self):
232 return self._repo.branchmap()
232 return self._repo.branchmap()
233
233
234 def capabilities(self):
234 def capabilities(self):
235 return self._caps
235 return self._caps
236
236
237 def clonebundles(self):
237 def clonebundles(self):
238 return self._repo.tryread('clonebundles.manifest')
238 return self._repo.tryread('clonebundles.manifest')
239
239
240 def debugwireargs(self, one, two, three=None, four=None, five=None):
240 def debugwireargs(self, one, two, three=None, four=None, five=None):
241 """Used to test argument passing over the wire"""
241 """Used to test argument passing over the wire"""
242 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
242 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
243 pycompat.bytestr(four),
243 pycompat.bytestr(four),
244 pycompat.bytestr(five))
244 pycompat.bytestr(five))
245
245
246 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
246 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
247 **kwargs):
247 **kwargs):
248 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
248 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
249 common=common, bundlecaps=bundlecaps,
249 common=common, bundlecaps=bundlecaps,
250 **kwargs)[1]
250 **kwargs)[1]
251 cb = util.chunkbuffer(chunks)
251 cb = util.chunkbuffer(chunks)
252
252
253 if exchange.bundle2requested(bundlecaps):
253 if exchange.bundle2requested(bundlecaps):
254 # When requesting a bundle2, getbundle returns a stream to make the
254 # When requesting a bundle2, getbundle returns a stream to make the
255 # wire level function happier. We need to build a proper object
255 # wire level function happier. We need to build a proper object
256 # from it in local peer.
256 # from it in local peer.
257 return bundle2.getunbundler(self.ui, cb)
257 return bundle2.getunbundler(self.ui, cb)
258 else:
258 else:
259 return changegroup.getunbundler('01', cb, None)
259 return changegroup.getunbundler('01', cb, None)
260
260
261 def heads(self):
261 def heads(self):
262 return self._repo.heads()
262 return self._repo.heads()
263
263
264 def known(self, nodes):
264 def known(self, nodes):
265 return self._repo.known(nodes)
265 return self._repo.known(nodes)
266
266
267 def listkeys(self, namespace):
267 def listkeys(self, namespace):
268 return self._repo.listkeys(namespace)
268 return self._repo.listkeys(namespace)
269
269
270 def lookup(self, key):
270 def lookup(self, key):
271 return self._repo.lookup(key)
271 return self._repo.lookup(key)
272
272
273 def pushkey(self, namespace, key, old, new):
273 def pushkey(self, namespace, key, old, new):
274 return self._repo.pushkey(namespace, key, old, new)
274 return self._repo.pushkey(namespace, key, old, new)
275
275
276 def stream_out(self):
276 def stream_out(self):
277 raise error.Abort(_('cannot perform stream clone against local '
277 raise error.Abort(_('cannot perform stream clone against local '
278 'peer'))
278 'peer'))
279
279
280 def unbundle(self, bundle, heads, url):
280 def unbundle(self, bundle, heads, url):
281 """apply a bundle on a repo
281 """apply a bundle on a repo
282
282
283 This function handles the repo locking itself."""
283 This function handles the repo locking itself."""
284 try:
284 try:
285 try:
285 try:
286 bundle = exchange.readbundle(self.ui, bundle, None)
286 bundle = exchange.readbundle(self.ui, bundle, None)
287 ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
287 ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
288 if util.safehasattr(ret, 'getchunks'):
288 if util.safehasattr(ret, 'getchunks'):
289 # This is a bundle20 object, turn it into an unbundler.
289 # This is a bundle20 object, turn it into an unbundler.
290 # This little dance should be dropped eventually when the
290 # This little dance should be dropped eventually when the
291 # API is finally improved.
291 # API is finally improved.
292 stream = util.chunkbuffer(ret.getchunks())
292 stream = util.chunkbuffer(ret.getchunks())
293 ret = bundle2.getunbundler(self.ui, stream)
293 ret = bundle2.getunbundler(self.ui, stream)
294 return ret
294 return ret
295 except Exception as exc:
295 except Exception as exc:
296 # If the exception contains output salvaged from a bundle2
296 # If the exception contains output salvaged from a bundle2
297 # reply, we need to make sure it is printed before continuing
297 # reply, we need to make sure it is printed before continuing
298 # to fail. So we build a bundle2 with such output and consume
298 # to fail. So we build a bundle2 with such output and consume
299 # it directly.
299 # it directly.
300 #
300 #
301 # This is not very elegant but allows a "simple" solution for
301 # This is not very elegant but allows a "simple" solution for
302 # issue4594
302 # issue4594
303 output = getattr(exc, '_bundle2salvagedoutput', ())
303 output = getattr(exc, '_bundle2salvagedoutput', ())
304 if output:
304 if output:
305 bundler = bundle2.bundle20(self._repo.ui)
305 bundler = bundle2.bundle20(self._repo.ui)
306 for out in output:
306 for out in output:
307 bundler.addpart(out)
307 bundler.addpart(out)
308 stream = util.chunkbuffer(bundler.getchunks())
308 stream = util.chunkbuffer(bundler.getchunks())
309 b = bundle2.getunbundler(self.ui, stream)
309 b = bundle2.getunbundler(self.ui, stream)
310 bundle2.processbundle(self._repo, b)
310 bundle2.processbundle(self._repo, b)
311 raise
311 raise
312 except error.PushRaced as exc:
312 except error.PushRaced as exc:
313 raise error.ResponseError(_('push failed:'),
313 raise error.ResponseError(_('push failed:'),
314 stringutil.forcebytestr(exc))
314 stringutil.forcebytestr(exc))
315
315
316 # End of _basewirecommands interface.
316 # End of _basewirecommands interface.
317
317
318 # Begin of peer interface.
318 # Begin of peer interface.
319
319
320 def commandexecutor(self):
320 def commandexecutor(self):
321 return localcommandexecutor(self)
321 return localcommandexecutor(self)
322
322
323 # End of peer interface.
323 # End of peer interface.
324
324
325 @interfaceutil.implementer(repository.ipeerlegacycommands)
325 @interfaceutil.implementer(repository.ipeerlegacycommands)
326 class locallegacypeer(localpeer):
326 class locallegacypeer(localpeer):
327 '''peer extension which implements legacy methods too; used for tests with
327 '''peer extension which implements legacy methods too; used for tests with
328 restricted capabilities'''
328 restricted capabilities'''
329
329
330 def __init__(self, repo):
330 def __init__(self, repo):
331 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
331 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
332
332
333 # Begin of baselegacywirecommands interface.
333 # Begin of baselegacywirecommands interface.
334
334
335 def between(self, pairs):
335 def between(self, pairs):
336 return self._repo.between(pairs)
336 return self._repo.between(pairs)
337
337
338 def branches(self, nodes):
338 def branches(self, nodes):
339 return self._repo.branches(nodes)
339 return self._repo.branches(nodes)
340
340
341 def changegroup(self, nodes, source):
341 def changegroup(self, nodes, source):
342 outgoing = discovery.outgoing(self._repo, missingroots=nodes,
342 outgoing = discovery.outgoing(self._repo, missingroots=nodes,
343 missingheads=self._repo.heads())
343 missingheads=self._repo.heads())
344 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
344 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
345
345
346 def changegroupsubset(self, bases, heads, source):
346 def changegroupsubset(self, bases, heads, source):
347 outgoing = discovery.outgoing(self._repo, missingroots=bases,
347 outgoing = discovery.outgoing(self._repo, missingroots=bases,
348 missingheads=heads)
348 missingheads=heads)
349 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
349 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
350
350
351 # End of baselegacywirecommands interface.
351 # End of baselegacywirecommands interface.
352
352
353 # Increment the sub-version when the revlog v2 format changes to lock out old
353 # Increment the sub-version when the revlog v2 format changes to lock out old
354 # clients.
354 # clients.
355 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
355 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
356
356
357 # A repository with the sparserevlog feature will have delta chains that
357 # A repository with the sparserevlog feature will have delta chains that
358 # can spread over a larger span. Sparse reading cuts these large spans into
358 # can spread over a larger span. Sparse reading cuts these large spans into
359 # pieces, so that each piece isn't too big.
359 # pieces, so that each piece isn't too big.
360 # Without the sparserevlog capability, reading from the repository could use
360 # Without the sparserevlog capability, reading from the repository could use
361 # huge amounts of memory, because the whole span would be read at once,
361 # huge amounts of memory, because the whole span would be read at once,
362 # including all the intermediate revisions that aren't pertinent for the chain.
362 # including all the intermediate revisions that aren't pertinent for the chain.
363 # This is why once a repository has enabled sparse-read, it becomes required.
363 # This is why once a repository has enabled sparse-read, it becomes required.
364 SPARSEREVLOG_REQUIREMENT = 'sparserevlog'
364 SPARSEREVLOG_REQUIREMENT = 'sparserevlog'
365
365
366 # Functions receiving (ui, features) that extensions can register to impact
366 # Functions receiving (ui, features) that extensions can register to impact
367 # the ability to load repositories with custom requirements. Only
367 # the ability to load repositories with custom requirements. Only
368 # functions defined in loaded extensions are called.
368 # functions defined in loaded extensions are called.
369 #
369 #
370 # The function receives a set of requirement strings that the repository
370 # The function receives a set of requirement strings that the repository
371 # is capable of opening. Functions will typically add elements to the
371 # is capable of opening. Functions will typically add elements to the
372 # set to reflect that the extension knows how to handle that requirements.
372 # set to reflect that the extension knows how to handle that requirements.
373 featuresetupfuncs = set()
373 featuresetupfuncs = set()
374
374
375 @interfaceutil.implementer(repository.completelocalrepository)
375 @interfaceutil.implementer(repository.completelocalrepository)
376 class localrepository(object):
376 class localrepository(object):
377
377
378 # obsolete experimental requirements:
378 # obsolete experimental requirements:
379 # - manifestv2: An experimental new manifest format that allowed
379 # - manifestv2: An experimental new manifest format that allowed
380 # for stem compression of long paths. Experiment ended up not
380 # for stem compression of long paths. Experiment ended up not
381 # being successful (repository sizes went up due to worse delta
381 # being successful (repository sizes went up due to worse delta
382 # chains), and the code was deleted in 4.6.
382 # chains), and the code was deleted in 4.6.
383 supportedformats = {
383 supportedformats = {
384 'revlogv1',
384 'revlogv1',
385 'generaldelta',
385 'generaldelta',
386 'treemanifest',
386 'treemanifest',
387 REVLOGV2_REQUIREMENT,
387 REVLOGV2_REQUIREMENT,
388 SPARSEREVLOG_REQUIREMENT,
388 SPARSEREVLOG_REQUIREMENT,
389 }
389 }
390 _basesupported = supportedformats | {
390 _basesupported = supportedformats | {
391 'store',
391 'store',
392 'fncache',
392 'fncache',
393 'shared',
393 'shared',
394 'relshared',
394 'relshared',
395 'dotencode',
395 'dotencode',
396 'exp-sparse',
396 'exp-sparse',
397 'internal-phase'
397 }
398 }
398 openerreqs = {
399 openerreqs = {
399 'revlogv1',
400 'revlogv1',
400 'generaldelta',
401 'generaldelta',
401 'treemanifest',
402 'treemanifest',
402 }
403 }
403
404
404 # list of prefix for file which can be written without 'wlock'
405 # list of prefix for file which can be written without 'wlock'
405 # Extensions should extend this list when needed
406 # Extensions should extend this list when needed
406 _wlockfreeprefix = {
407 _wlockfreeprefix = {
407 # We migh consider requiring 'wlock' for the next
408 # We migh consider requiring 'wlock' for the next
408 # two, but pretty much all the existing code assume
409 # two, but pretty much all the existing code assume
409 # wlock is not needed so we keep them excluded for
410 # wlock is not needed so we keep them excluded for
410 # now.
411 # now.
411 'hgrc',
412 'hgrc',
412 'requires',
413 'requires',
413 # XXX cache is a complicatged business someone
414 # XXX cache is a complicatged business someone
414 # should investigate this in depth at some point
415 # should investigate this in depth at some point
415 'cache/',
416 'cache/',
416 # XXX shouldn't be dirstate covered by the wlock?
417 # XXX shouldn't be dirstate covered by the wlock?
417 'dirstate',
418 'dirstate',
418 # XXX bisect was still a bit too messy at the time
419 # XXX bisect was still a bit too messy at the time
419 # this changeset was introduced. Someone should fix
420 # this changeset was introduced. Someone should fix
420 # the remainig bit and drop this line
421 # the remainig bit and drop this line
421 'bisect.state',
422 'bisect.state',
422 }
423 }
423
424
424 def __init__(self, baseui, path, create=False, intents=None):
425 def __init__(self, baseui, path, create=False, intents=None):
425 self.requirements = set()
426 self.requirements = set()
426 self.filtername = None
427 self.filtername = None
427 # wvfs: rooted at the repository root, used to access the working copy
428 # wvfs: rooted at the repository root, used to access the working copy
428 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
429 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
429 # vfs: rooted at .hg, used to access repo files outside of .hg/store
430 # vfs: rooted at .hg, used to access repo files outside of .hg/store
430 self.vfs = None
431 self.vfs = None
431 # svfs: usually rooted at .hg/store, used to access repository history
432 # svfs: usually rooted at .hg/store, used to access repository history
432 # If this is a shared repository, this vfs may point to another
433 # If this is a shared repository, this vfs may point to another
433 # repository's .hg/store directory.
434 # repository's .hg/store directory.
434 self.svfs = None
435 self.svfs = None
435 self.root = self.wvfs.base
436 self.root = self.wvfs.base
436 self.path = self.wvfs.join(".hg")
437 self.path = self.wvfs.join(".hg")
437 self.origroot = path
438 self.origroot = path
438 # This is only used by context.workingctx.match in order to
439 # This is only used by context.workingctx.match in order to
439 # detect files in subrepos.
440 # detect files in subrepos.
440 self.auditor = pathutil.pathauditor(
441 self.auditor = pathutil.pathauditor(
441 self.root, callback=self._checknested)
442 self.root, callback=self._checknested)
442 # This is only used by context.basectx.match in order to detect
443 # This is only used by context.basectx.match in order to detect
443 # files in subrepos.
444 # files in subrepos.
444 self.nofsauditor = pathutil.pathauditor(
445 self.nofsauditor = pathutil.pathauditor(
445 self.root, callback=self._checknested, realfs=False, cached=True)
446 self.root, callback=self._checknested, realfs=False, cached=True)
446 self.baseui = baseui
447 self.baseui = baseui
447 self.ui = baseui.copy()
448 self.ui = baseui.copy()
448 self.ui.copy = baseui.copy # prevent copying repo configuration
449 self.ui.copy = baseui.copy # prevent copying repo configuration
449 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
450 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
450 if (self.ui.configbool('devel', 'all-warnings') or
451 if (self.ui.configbool('devel', 'all-warnings') or
451 self.ui.configbool('devel', 'check-locks')):
452 self.ui.configbool('devel', 'check-locks')):
452 self.vfs.audit = self._getvfsward(self.vfs.audit)
453 self.vfs.audit = self._getvfsward(self.vfs.audit)
453 # A list of callback to shape the phase if no data were found.
454 # A list of callback to shape the phase if no data were found.
454 # Callback are in the form: func(repo, roots) --> processed root.
455 # Callback are in the form: func(repo, roots) --> processed root.
455 # This list it to be filled by extension during repo setup
456 # This list it to be filled by extension during repo setup
456 self._phasedefaults = []
457 self._phasedefaults = []
457 try:
458 try:
458 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
459 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
459 self._loadextensions()
460 self._loadextensions()
460 except IOError:
461 except IOError:
461 pass
462 pass
462
463
463 if featuresetupfuncs:
464 if featuresetupfuncs:
464 self.supported = set(self._basesupported) # use private copy
465 self.supported = set(self._basesupported) # use private copy
465 extmods = set(m.__name__ for n, m
466 extmods = set(m.__name__ for n, m
466 in extensions.extensions(self.ui))
467 in extensions.extensions(self.ui))
467 for setupfunc in featuresetupfuncs:
468 for setupfunc in featuresetupfuncs:
468 if setupfunc.__module__ in extmods:
469 if setupfunc.__module__ in extmods:
469 setupfunc(self.ui, self.supported)
470 setupfunc(self.ui, self.supported)
470 else:
471 else:
471 self.supported = self._basesupported
472 self.supported = self._basesupported
472 color.setup(self.ui)
473 color.setup(self.ui)
473
474
474 # Add compression engines.
475 # Add compression engines.
475 for name in util.compengines:
476 for name in util.compengines:
476 engine = util.compengines[name]
477 engine = util.compengines[name]
477 if engine.revlogheader():
478 if engine.revlogheader():
478 self.supported.add('exp-compression-%s' % name)
479 self.supported.add('exp-compression-%s' % name)
479
480
480 if not self.vfs.isdir():
481 if not self.vfs.isdir():
481 if create:
482 if create:
482 self.requirements = newreporequirements(self)
483 self.requirements = newreporequirements(self)
483
484
484 if not self.wvfs.exists():
485 if not self.wvfs.exists():
485 self.wvfs.makedirs()
486 self.wvfs.makedirs()
486 self.vfs.makedir(notindexed=True)
487 self.vfs.makedir(notindexed=True)
487
488
488 if 'store' in self.requirements:
489 if 'store' in self.requirements:
489 self.vfs.mkdir("store")
490 self.vfs.mkdir("store")
490
491
491 # create an invalid changelog
492 # create an invalid changelog
492 self.vfs.append(
493 self.vfs.append(
493 "00changelog.i",
494 "00changelog.i",
494 '\0\0\0\2' # represents revlogv2
495 '\0\0\0\2' # represents revlogv2
495 ' dummy changelog to prevent using the old repo layout'
496 ' dummy changelog to prevent using the old repo layout'
496 )
497 )
497 else:
498 else:
498 try:
499 try:
499 self.vfs.stat()
500 self.vfs.stat()
500 except OSError as inst:
501 except OSError as inst:
501 if inst.errno != errno.ENOENT:
502 if inst.errno != errno.ENOENT:
502 raise
503 raise
503 raise error.RepoError(_("repository %s not found") % path)
504 raise error.RepoError(_("repository %s not found") % path)
504 elif create:
505 elif create:
505 raise error.RepoError(_("repository %s already exists") % path)
506 raise error.RepoError(_("repository %s already exists") % path)
506 else:
507 else:
507 try:
508 try:
508 self.requirements = scmutil.readrequires(
509 self.requirements = scmutil.readrequires(
509 self.vfs, self.supported)
510 self.vfs, self.supported)
510 except IOError as inst:
511 except IOError as inst:
511 if inst.errno != errno.ENOENT:
512 if inst.errno != errno.ENOENT:
512 raise
513 raise
513
514
514 cachepath = self.vfs.join('cache')
515 cachepath = self.vfs.join('cache')
515 self.sharedpath = self.path
516 self.sharedpath = self.path
516 try:
517 try:
517 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
518 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
518 if 'relshared' in self.requirements:
519 if 'relshared' in self.requirements:
519 sharedpath = self.vfs.join(sharedpath)
520 sharedpath = self.vfs.join(sharedpath)
520 vfs = vfsmod.vfs(sharedpath, realpath=True)
521 vfs = vfsmod.vfs(sharedpath, realpath=True)
521 cachepath = vfs.join('cache')
522 cachepath = vfs.join('cache')
522 s = vfs.base
523 s = vfs.base
523 if not vfs.exists():
524 if not vfs.exists():
524 raise error.RepoError(
525 raise error.RepoError(
525 _('.hg/sharedpath points to nonexistent directory %s') % s)
526 _('.hg/sharedpath points to nonexistent directory %s') % s)
526 self.sharedpath = s
527 self.sharedpath = s
527 except IOError as inst:
528 except IOError as inst:
528 if inst.errno != errno.ENOENT:
529 if inst.errno != errno.ENOENT:
529 raise
530 raise
530
531
531 if 'exp-sparse' in self.requirements and not sparse.enabled:
532 if 'exp-sparse' in self.requirements and not sparse.enabled:
532 raise error.RepoError(_('repository is using sparse feature but '
533 raise error.RepoError(_('repository is using sparse feature but '
533 'sparse is not enabled; enable the '
534 'sparse is not enabled; enable the '
534 '"sparse" extensions to access'))
535 '"sparse" extensions to access'))
535
536
536 self.store = store.store(
537 self.store = store.store(
537 self.requirements, self.sharedpath,
538 self.requirements, self.sharedpath,
538 lambda base: vfsmod.vfs(base, cacheaudited=True))
539 lambda base: vfsmod.vfs(base, cacheaudited=True))
539 self.spath = self.store.path
540 self.spath = self.store.path
540 self.svfs = self.store.vfs
541 self.svfs = self.store.vfs
541 self.sjoin = self.store.join
542 self.sjoin = self.store.join
542 self.vfs.createmode = self.store.createmode
543 self.vfs.createmode = self.store.createmode
543 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
544 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
544 self.cachevfs.createmode = self.store.createmode
545 self.cachevfs.createmode = self.store.createmode
545 if (self.ui.configbool('devel', 'all-warnings') or
546 if (self.ui.configbool('devel', 'all-warnings') or
546 self.ui.configbool('devel', 'check-locks')):
547 self.ui.configbool('devel', 'check-locks')):
547 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
548 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
548 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
549 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
549 else: # standard vfs
550 else: # standard vfs
550 self.svfs.audit = self._getsvfsward(self.svfs.audit)
551 self.svfs.audit = self._getsvfsward(self.svfs.audit)
551 self._applyopenerreqs()
552 self._applyopenerreqs()
552 if create:
553 if create:
553 self._writerequirements()
554 self._writerequirements()
554
555
555 self._dirstatevalidatewarned = False
556 self._dirstatevalidatewarned = False
556
557
557 self._branchcaches = {}
558 self._branchcaches = {}
558 self._revbranchcache = None
559 self._revbranchcache = None
559 self._filterpats = {}
560 self._filterpats = {}
560 self._datafilters = {}
561 self._datafilters = {}
561 self._transref = self._lockref = self._wlockref = None
562 self._transref = self._lockref = self._wlockref = None
562
563
563 # A cache for various files under .hg/ that tracks file changes,
564 # A cache for various files under .hg/ that tracks file changes,
564 # (used by the filecache decorator)
565 # (used by the filecache decorator)
565 #
566 #
566 # Maps a property name to its util.filecacheentry
567 # Maps a property name to its util.filecacheentry
567 self._filecache = {}
568 self._filecache = {}
568
569
569 # hold sets of revision to be filtered
570 # hold sets of revision to be filtered
570 # should be cleared when something might have changed the filter value:
571 # should be cleared when something might have changed the filter value:
571 # - new changesets,
572 # - new changesets,
572 # - phase change,
573 # - phase change,
573 # - new obsolescence marker,
574 # - new obsolescence marker,
574 # - working directory parent change,
575 # - working directory parent change,
575 # - bookmark changes
576 # - bookmark changes
576 self.filteredrevcache = {}
577 self.filteredrevcache = {}
577
578
578 # post-dirstate-status hooks
579 # post-dirstate-status hooks
579 self._postdsstatus = []
580 self._postdsstatus = []
580
581
581 # generic mapping between names and nodes
582 # generic mapping between names and nodes
582 self.names = namespaces.namespaces()
583 self.names = namespaces.namespaces()
583
584
584 # Key to signature value.
585 # Key to signature value.
585 self._sparsesignaturecache = {}
586 self._sparsesignaturecache = {}
586 # Signature to cached matcher instance.
587 # Signature to cached matcher instance.
587 self._sparsematchercache = {}
588 self._sparsematchercache = {}
588
589
589 def _getvfsward(self, origfunc):
590 def _getvfsward(self, origfunc):
590 """build a ward for self.vfs"""
591 """build a ward for self.vfs"""
591 rref = weakref.ref(self)
592 rref = weakref.ref(self)
592 def checkvfs(path, mode=None):
593 def checkvfs(path, mode=None):
593 ret = origfunc(path, mode=mode)
594 ret = origfunc(path, mode=mode)
594 repo = rref()
595 repo = rref()
595 if (repo is None
596 if (repo is None
596 or not util.safehasattr(repo, '_wlockref')
597 or not util.safehasattr(repo, '_wlockref')
597 or not util.safehasattr(repo, '_lockref')):
598 or not util.safehasattr(repo, '_lockref')):
598 return
599 return
599 if mode in (None, 'r', 'rb'):
600 if mode in (None, 'r', 'rb'):
600 return
601 return
601 if path.startswith(repo.path):
602 if path.startswith(repo.path):
602 # truncate name relative to the repository (.hg)
603 # truncate name relative to the repository (.hg)
603 path = path[len(repo.path) + 1:]
604 path = path[len(repo.path) + 1:]
604 if path.startswith('cache/'):
605 if path.startswith('cache/'):
605 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
606 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
606 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
607 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
607 if path.startswith('journal.'):
608 if path.startswith('journal.'):
608 # journal is covered by 'lock'
609 # journal is covered by 'lock'
609 if repo._currentlock(repo._lockref) is None:
610 if repo._currentlock(repo._lockref) is None:
610 repo.ui.develwarn('write with no lock: "%s"' % path,
611 repo.ui.develwarn('write with no lock: "%s"' % path,
611 stacklevel=2, config='check-locks')
612 stacklevel=2, config='check-locks')
612 elif repo._currentlock(repo._wlockref) is None:
613 elif repo._currentlock(repo._wlockref) is None:
613 # rest of vfs files are covered by 'wlock'
614 # rest of vfs files are covered by 'wlock'
614 #
615 #
615 # exclude special files
616 # exclude special files
616 for prefix in self._wlockfreeprefix:
617 for prefix in self._wlockfreeprefix:
617 if path.startswith(prefix):
618 if path.startswith(prefix):
618 return
619 return
619 repo.ui.develwarn('write with no wlock: "%s"' % path,
620 repo.ui.develwarn('write with no wlock: "%s"' % path,
620 stacklevel=2, config='check-locks')
621 stacklevel=2, config='check-locks')
621 return ret
622 return ret
622 return checkvfs
623 return checkvfs
623
624
624 def _getsvfsward(self, origfunc):
625 def _getsvfsward(self, origfunc):
625 """build a ward for self.svfs"""
626 """build a ward for self.svfs"""
626 rref = weakref.ref(self)
627 rref = weakref.ref(self)
627 def checksvfs(path, mode=None):
628 def checksvfs(path, mode=None):
628 ret = origfunc(path, mode=mode)
629 ret = origfunc(path, mode=mode)
629 repo = rref()
630 repo = rref()
630 if repo is None or not util.safehasattr(repo, '_lockref'):
631 if repo is None or not util.safehasattr(repo, '_lockref'):
631 return
632 return
632 if mode in (None, 'r', 'rb'):
633 if mode in (None, 'r', 'rb'):
633 return
634 return
634 if path.startswith(repo.sharedpath):
635 if path.startswith(repo.sharedpath):
635 # truncate name relative to the repository (.hg)
636 # truncate name relative to the repository (.hg)
636 path = path[len(repo.sharedpath) + 1:]
637 path = path[len(repo.sharedpath) + 1:]
637 if repo._currentlock(repo._lockref) is None:
638 if repo._currentlock(repo._lockref) is None:
638 repo.ui.develwarn('write with no lock: "%s"' % path,
639 repo.ui.develwarn('write with no lock: "%s"' % path,
639 stacklevel=3)
640 stacklevel=3)
640 return ret
641 return ret
641 return checksvfs
642 return checksvfs
642
643
643 def close(self):
644 def close(self):
644 self._writecaches()
645 self._writecaches()
645
646
646 def _loadextensions(self):
647 def _loadextensions(self):
647 extensions.loadall(self.ui)
648 extensions.loadall(self.ui)
648
649
649 def _writecaches(self):
650 def _writecaches(self):
650 if self._revbranchcache:
651 if self._revbranchcache:
651 self._revbranchcache.write()
652 self._revbranchcache.write()
652
653
653 def _restrictcapabilities(self, caps):
654 def _restrictcapabilities(self, caps):
654 if self.ui.configbool('experimental', 'bundle2-advertise'):
655 if self.ui.configbool('experimental', 'bundle2-advertise'):
655 caps = set(caps)
656 caps = set(caps)
656 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
657 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
657 role='client'))
658 role='client'))
658 caps.add('bundle2=' + urlreq.quote(capsblob))
659 caps.add('bundle2=' + urlreq.quote(capsblob))
659 return caps
660 return caps
660
661
661 def _applyopenerreqs(self):
662 def _applyopenerreqs(self):
662 self.svfs.options = dict((r, 1) for r in self.requirements
663 self.svfs.options = dict((r, 1) for r in self.requirements
663 if r in self.openerreqs)
664 if r in self.openerreqs)
664 # experimental config: format.chunkcachesize
665 # experimental config: format.chunkcachesize
665 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
666 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
666 if chunkcachesize is not None:
667 if chunkcachesize is not None:
667 self.svfs.options['chunkcachesize'] = chunkcachesize
668 self.svfs.options['chunkcachesize'] = chunkcachesize
668 # experimental config: format.maxchainlen
669 # experimental config: format.maxchainlen
669 maxchainlen = self.ui.configint('format', 'maxchainlen')
670 maxchainlen = self.ui.configint('format', 'maxchainlen')
670 if maxchainlen is not None:
671 if maxchainlen is not None:
671 self.svfs.options['maxchainlen'] = maxchainlen
672 self.svfs.options['maxchainlen'] = maxchainlen
672 # experimental config: format.manifestcachesize
673 # experimental config: format.manifestcachesize
673 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
674 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
674 if manifestcachesize is not None:
675 if manifestcachesize is not None:
675 self.svfs.options['manifestcachesize'] = manifestcachesize
676 self.svfs.options['manifestcachesize'] = manifestcachesize
676 deltabothparents = self.ui.configbool('storage',
677 deltabothparents = self.ui.configbool('storage',
677 'revlog.optimize-delta-parent-choice')
678 'revlog.optimize-delta-parent-choice')
678 self.svfs.options['deltabothparents'] = deltabothparents
679 self.svfs.options['deltabothparents'] = deltabothparents
679 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
680 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
680 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan')
681 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan')
681 if 0 <= chainspan:
682 if 0 <= chainspan:
682 self.svfs.options['maxdeltachainspan'] = chainspan
683 self.svfs.options['maxdeltachainspan'] = chainspan
683 mmapindexthreshold = self.ui.configbytes('experimental',
684 mmapindexthreshold = self.ui.configbytes('experimental',
684 'mmapindexthreshold')
685 'mmapindexthreshold')
685 if mmapindexthreshold is not None:
686 if mmapindexthreshold is not None:
686 self.svfs.options['mmapindexthreshold'] = mmapindexthreshold
687 self.svfs.options['mmapindexthreshold'] = mmapindexthreshold
687 withsparseread = self.ui.configbool('experimental', 'sparse-read')
688 withsparseread = self.ui.configbool('experimental', 'sparse-read')
688 srdensitythres = float(self.ui.config('experimental',
689 srdensitythres = float(self.ui.config('experimental',
689 'sparse-read.density-threshold'))
690 'sparse-read.density-threshold'))
690 srmingapsize = self.ui.configbytes('experimental',
691 srmingapsize = self.ui.configbytes('experimental',
691 'sparse-read.min-gap-size')
692 'sparse-read.min-gap-size')
692 self.svfs.options['with-sparse-read'] = withsparseread
693 self.svfs.options['with-sparse-read'] = withsparseread
693 self.svfs.options['sparse-read-density-threshold'] = srdensitythres
694 self.svfs.options['sparse-read-density-threshold'] = srdensitythres
694 self.svfs.options['sparse-read-min-gap-size'] = srmingapsize
695 self.svfs.options['sparse-read-min-gap-size'] = srmingapsize
695 sparserevlog = SPARSEREVLOG_REQUIREMENT in self.requirements
696 sparserevlog = SPARSEREVLOG_REQUIREMENT in self.requirements
696 self.svfs.options['sparse-revlog'] = sparserevlog
697 self.svfs.options['sparse-revlog'] = sparserevlog
697 if sparserevlog:
698 if sparserevlog:
698 self.svfs.options['generaldelta'] = True
699 self.svfs.options['generaldelta'] = True
699
700
700 for r in self.requirements:
701 for r in self.requirements:
701 if r.startswith('exp-compression-'):
702 if r.startswith('exp-compression-'):
702 self.svfs.options['compengine'] = r[len('exp-compression-'):]
703 self.svfs.options['compengine'] = r[len('exp-compression-'):]
703
704
704 # TODO move "revlogv2" to openerreqs once finalized.
705 # TODO move "revlogv2" to openerreqs once finalized.
705 if REVLOGV2_REQUIREMENT in self.requirements:
706 if REVLOGV2_REQUIREMENT in self.requirements:
706 self.svfs.options['revlogv2'] = True
707 self.svfs.options['revlogv2'] = True
707
708
708 def _writerequirements(self):
709 def _writerequirements(self):
709 scmutil.writerequires(self.vfs, self.requirements)
710 scmutil.writerequires(self.vfs, self.requirements)
710
711
711 def _checknested(self, path):
712 def _checknested(self, path):
712 """Determine if path is a legal nested repository."""
713 """Determine if path is a legal nested repository."""
713 if not path.startswith(self.root):
714 if not path.startswith(self.root):
714 return False
715 return False
715 subpath = path[len(self.root) + 1:]
716 subpath = path[len(self.root) + 1:]
716 normsubpath = util.pconvert(subpath)
717 normsubpath = util.pconvert(subpath)
717
718
718 # XXX: Checking against the current working copy is wrong in
719 # XXX: Checking against the current working copy is wrong in
719 # the sense that it can reject things like
720 # the sense that it can reject things like
720 #
721 #
721 # $ hg cat -r 10 sub/x.txt
722 # $ hg cat -r 10 sub/x.txt
722 #
723 #
723 # if sub/ is no longer a subrepository in the working copy
724 # if sub/ is no longer a subrepository in the working copy
724 # parent revision.
725 # parent revision.
725 #
726 #
726 # However, it can of course also allow things that would have
727 # However, it can of course also allow things that would have
727 # been rejected before, such as the above cat command if sub/
728 # been rejected before, such as the above cat command if sub/
728 # is a subrepository now, but was a normal directory before.
729 # is a subrepository now, but was a normal directory before.
729 # The old path auditor would have rejected by mistake since it
730 # The old path auditor would have rejected by mistake since it
730 # panics when it sees sub/.hg/.
731 # panics when it sees sub/.hg/.
731 #
732 #
732 # All in all, checking against the working copy seems sensible
733 # All in all, checking against the working copy seems sensible
733 # since we want to prevent access to nested repositories on
734 # since we want to prevent access to nested repositories on
734 # the filesystem *now*.
735 # the filesystem *now*.
735 ctx = self[None]
736 ctx = self[None]
736 parts = util.splitpath(subpath)
737 parts = util.splitpath(subpath)
737 while parts:
738 while parts:
738 prefix = '/'.join(parts)
739 prefix = '/'.join(parts)
739 if prefix in ctx.substate:
740 if prefix in ctx.substate:
740 if prefix == normsubpath:
741 if prefix == normsubpath:
741 return True
742 return True
742 else:
743 else:
743 sub = ctx.sub(prefix)
744 sub = ctx.sub(prefix)
744 return sub.checknested(subpath[len(prefix) + 1:])
745 return sub.checknested(subpath[len(prefix) + 1:])
745 else:
746 else:
746 parts.pop()
747 parts.pop()
747 return False
748 return False
748
749
749 def peer(self):
750 def peer(self):
750 return localpeer(self) # not cached to avoid reference cycle
751 return localpeer(self) # not cached to avoid reference cycle
751
752
752 def unfiltered(self):
753 def unfiltered(self):
753 """Return unfiltered version of the repository
754 """Return unfiltered version of the repository
754
755
755 Intended to be overwritten by filtered repo."""
756 Intended to be overwritten by filtered repo."""
756 return self
757 return self
757
758
758 def filtered(self, name, visibilityexceptions=None):
759 def filtered(self, name, visibilityexceptions=None):
759 """Return a filtered version of a repository"""
760 """Return a filtered version of a repository"""
760 cls = repoview.newtype(self.unfiltered().__class__)
761 cls = repoview.newtype(self.unfiltered().__class__)
761 return cls(self, name, visibilityexceptions)
762 return cls(self, name, visibilityexceptions)
762
763
763 @repofilecache('bookmarks', 'bookmarks.current')
764 @repofilecache('bookmarks', 'bookmarks.current')
764 def _bookmarks(self):
765 def _bookmarks(self):
765 return bookmarks.bmstore(self)
766 return bookmarks.bmstore(self)
766
767
767 @property
768 @property
768 def _activebookmark(self):
769 def _activebookmark(self):
769 return self._bookmarks.active
770 return self._bookmarks.active
770
771
771 # _phasesets depend on changelog. what we need is to call
772 # _phasesets depend on changelog. what we need is to call
772 # _phasecache.invalidate() if '00changelog.i' was changed, but it
773 # _phasecache.invalidate() if '00changelog.i' was changed, but it
773 # can't be easily expressed in filecache mechanism.
774 # can't be easily expressed in filecache mechanism.
774 @storecache('phaseroots', '00changelog.i')
775 @storecache('phaseroots', '00changelog.i')
775 def _phasecache(self):
776 def _phasecache(self):
776 return phases.phasecache(self, self._phasedefaults)
777 return phases.phasecache(self, self._phasedefaults)
777
778
778 @storecache('obsstore')
779 @storecache('obsstore')
779 def obsstore(self):
780 def obsstore(self):
780 return obsolete.makestore(self.ui, self)
781 return obsolete.makestore(self.ui, self)
781
782
782 @storecache('00changelog.i')
783 @storecache('00changelog.i')
783 def changelog(self):
784 def changelog(self):
784 return changelog.changelog(self.svfs,
785 return changelog.changelog(self.svfs,
785 trypending=txnutil.mayhavepending(self.root))
786 trypending=txnutil.mayhavepending(self.root))
786
787
787 def _constructmanifest(self):
788 def _constructmanifest(self):
788 # This is a temporary function while we migrate from manifest to
789 # This is a temporary function while we migrate from manifest to
789 # manifestlog. It allows bundlerepo and unionrepo to intercept the
790 # manifestlog. It allows bundlerepo and unionrepo to intercept the
790 # manifest creation.
791 # manifest creation.
791 return manifest.manifestrevlog(self.svfs)
792 return manifest.manifestrevlog(self.svfs)
792
793
793 @storecache('00manifest.i')
794 @storecache('00manifest.i')
794 def manifestlog(self):
795 def manifestlog(self):
795 return manifest.manifestlog(self.svfs, self)
796 return manifest.manifestlog(self.svfs, self)
796
797
797 @repofilecache('dirstate')
798 @repofilecache('dirstate')
798 def dirstate(self):
799 def dirstate(self):
799 return self._makedirstate()
800 return self._makedirstate()
800
801
801 def _makedirstate(self):
802 def _makedirstate(self):
802 """Extension point for wrapping the dirstate per-repo."""
803 """Extension point for wrapping the dirstate per-repo."""
803 sparsematchfn = lambda: sparse.matcher(self)
804 sparsematchfn = lambda: sparse.matcher(self)
804
805
805 return dirstate.dirstate(self.vfs, self.ui, self.root,
806 return dirstate.dirstate(self.vfs, self.ui, self.root,
806 self._dirstatevalidate, sparsematchfn)
807 self._dirstatevalidate, sparsematchfn)
807
808
808 def _dirstatevalidate(self, node):
809 def _dirstatevalidate(self, node):
809 try:
810 try:
810 self.changelog.rev(node)
811 self.changelog.rev(node)
811 return node
812 return node
812 except error.LookupError:
813 except error.LookupError:
813 if not self._dirstatevalidatewarned:
814 if not self._dirstatevalidatewarned:
814 self._dirstatevalidatewarned = True
815 self._dirstatevalidatewarned = True
815 self.ui.warn(_("warning: ignoring unknown"
816 self.ui.warn(_("warning: ignoring unknown"
816 " working parent %s!\n") % short(node))
817 " working parent %s!\n") % short(node))
817 return nullid
818 return nullid
818
819
819 @storecache(narrowspec.FILENAME)
820 @storecache(narrowspec.FILENAME)
820 def narrowpats(self):
821 def narrowpats(self):
821 """matcher patterns for this repository's narrowspec
822 """matcher patterns for this repository's narrowspec
822
823
823 A tuple of (includes, excludes).
824 A tuple of (includes, excludes).
824 """
825 """
825 source = self
826 source = self
826 if self.shared():
827 if self.shared():
827 from . import hg
828 from . import hg
828 source = hg.sharedreposource(self)
829 source = hg.sharedreposource(self)
829 return narrowspec.load(source)
830 return narrowspec.load(source)
830
831
831 @storecache(narrowspec.FILENAME)
832 @storecache(narrowspec.FILENAME)
832 def _narrowmatch(self):
833 def _narrowmatch(self):
833 if repository.NARROW_REQUIREMENT not in self.requirements:
834 if repository.NARROW_REQUIREMENT not in self.requirements:
834 return matchmod.always(self.root, '')
835 return matchmod.always(self.root, '')
835 include, exclude = self.narrowpats
836 include, exclude = self.narrowpats
836 return narrowspec.match(self.root, include=include, exclude=exclude)
837 return narrowspec.match(self.root, include=include, exclude=exclude)
837
838
838 # TODO(martinvonz): make this property-like instead?
839 # TODO(martinvonz): make this property-like instead?
839 def narrowmatch(self):
840 def narrowmatch(self):
840 return self._narrowmatch
841 return self._narrowmatch
841
842
842 def setnarrowpats(self, newincludes, newexcludes):
843 def setnarrowpats(self, newincludes, newexcludes):
843 target = self
844 target = self
844 if self.shared():
845 if self.shared():
845 from . import hg
846 from . import hg
846 target = hg.sharedreposource(self)
847 target = hg.sharedreposource(self)
847 narrowspec.save(target, newincludes, newexcludes)
848 narrowspec.save(target, newincludes, newexcludes)
848 self.invalidate(clearfilecache=True)
849 self.invalidate(clearfilecache=True)
849
850
850 def __getitem__(self, changeid):
851 def __getitem__(self, changeid):
851 if changeid is None:
852 if changeid is None:
852 return context.workingctx(self)
853 return context.workingctx(self)
853 if isinstance(changeid, context.basectx):
854 if isinstance(changeid, context.basectx):
854 return changeid
855 return changeid
855 if isinstance(changeid, slice):
856 if isinstance(changeid, slice):
856 # wdirrev isn't contiguous so the slice shouldn't include it
857 # wdirrev isn't contiguous so the slice shouldn't include it
857 return [context.changectx(self, i)
858 return [context.changectx(self, i)
858 for i in pycompat.xrange(*changeid.indices(len(self)))
859 for i in pycompat.xrange(*changeid.indices(len(self)))
859 if i not in self.changelog.filteredrevs]
860 if i not in self.changelog.filteredrevs]
860 try:
861 try:
861 return context.changectx(self, changeid)
862 return context.changectx(self, changeid)
862 except error.WdirUnsupported:
863 except error.WdirUnsupported:
863 return context.workingctx(self)
864 return context.workingctx(self)
864
865
865 def __contains__(self, changeid):
866 def __contains__(self, changeid):
866 """True if the given changeid exists
867 """True if the given changeid exists
867
868
868 error.AmbiguousPrefixLookupError is raised if an ambiguous node
869 error.AmbiguousPrefixLookupError is raised if an ambiguous node
869 specified.
870 specified.
870 """
871 """
871 try:
872 try:
872 self[changeid]
873 self[changeid]
873 return True
874 return True
874 except error.RepoLookupError:
875 except error.RepoLookupError:
875 return False
876 return False
876
877
877 def __nonzero__(self):
878 def __nonzero__(self):
878 return True
879 return True
879
880
880 __bool__ = __nonzero__
881 __bool__ = __nonzero__
881
882
882 def __len__(self):
883 def __len__(self):
883 # no need to pay the cost of repoview.changelog
884 # no need to pay the cost of repoview.changelog
884 unfi = self.unfiltered()
885 unfi = self.unfiltered()
885 return len(unfi.changelog)
886 return len(unfi.changelog)
886
887
887 def __iter__(self):
888 def __iter__(self):
888 return iter(self.changelog)
889 return iter(self.changelog)
889
890
890 def revs(self, expr, *args):
891 def revs(self, expr, *args):
891 '''Find revisions matching a revset.
892 '''Find revisions matching a revset.
892
893
893 The revset is specified as a string ``expr`` that may contain
894 The revset is specified as a string ``expr`` that may contain
894 %-formatting to escape certain types. See ``revsetlang.formatspec``.
895 %-formatting to escape certain types. See ``revsetlang.formatspec``.
895
896
896 Revset aliases from the configuration are not expanded. To expand
897 Revset aliases from the configuration are not expanded. To expand
897 user aliases, consider calling ``scmutil.revrange()`` or
898 user aliases, consider calling ``scmutil.revrange()`` or
898 ``repo.anyrevs([expr], user=True)``.
899 ``repo.anyrevs([expr], user=True)``.
899
900
900 Returns a revset.abstractsmartset, which is a list-like interface
901 Returns a revset.abstractsmartset, which is a list-like interface
901 that contains integer revisions.
902 that contains integer revisions.
902 '''
903 '''
903 expr = revsetlang.formatspec(expr, *args)
904 expr = revsetlang.formatspec(expr, *args)
904 m = revset.match(None, expr)
905 m = revset.match(None, expr)
905 return m(self)
906 return m(self)
906
907
907 def set(self, expr, *args):
908 def set(self, expr, *args):
908 '''Find revisions matching a revset and emit changectx instances.
909 '''Find revisions matching a revset and emit changectx instances.
909
910
910 This is a convenience wrapper around ``revs()`` that iterates the
911 This is a convenience wrapper around ``revs()`` that iterates the
911 result and is a generator of changectx instances.
912 result and is a generator of changectx instances.
912
913
913 Revset aliases from the configuration are not expanded. To expand
914 Revset aliases from the configuration are not expanded. To expand
914 user aliases, consider calling ``scmutil.revrange()``.
915 user aliases, consider calling ``scmutil.revrange()``.
915 '''
916 '''
916 for r in self.revs(expr, *args):
917 for r in self.revs(expr, *args):
917 yield self[r]
918 yield self[r]
918
919
919 def anyrevs(self, specs, user=False, localalias=None):
920 def anyrevs(self, specs, user=False, localalias=None):
920 '''Find revisions matching one of the given revsets.
921 '''Find revisions matching one of the given revsets.
921
922
922 Revset aliases from the configuration are not expanded by default. To
923 Revset aliases from the configuration are not expanded by default. To
923 expand user aliases, specify ``user=True``. To provide some local
924 expand user aliases, specify ``user=True``. To provide some local
924 definitions overriding user aliases, set ``localalias`` to
925 definitions overriding user aliases, set ``localalias`` to
925 ``{name: definitionstring}``.
926 ``{name: definitionstring}``.
926 '''
927 '''
927 if user:
928 if user:
928 m = revset.matchany(self.ui, specs,
929 m = revset.matchany(self.ui, specs,
929 lookup=revset.lookupfn(self),
930 lookup=revset.lookupfn(self),
930 localalias=localalias)
931 localalias=localalias)
931 else:
932 else:
932 m = revset.matchany(None, specs, localalias=localalias)
933 m = revset.matchany(None, specs, localalias=localalias)
933 return m(self)
934 return m(self)
934
935
935 def url(self):
936 def url(self):
936 return 'file:' + self.root
937 return 'file:' + self.root
937
938
938 def hook(self, name, throw=False, **args):
939 def hook(self, name, throw=False, **args):
939 """Call a hook, passing this repo instance.
940 """Call a hook, passing this repo instance.
940
941
941 This a convenience method to aid invoking hooks. Extensions likely
942 This a convenience method to aid invoking hooks. Extensions likely
942 won't call this unless they have registered a custom hook or are
943 won't call this unless they have registered a custom hook or are
943 replacing code that is expected to call a hook.
944 replacing code that is expected to call a hook.
944 """
945 """
945 return hook.hook(self.ui, self, name, throw, **args)
946 return hook.hook(self.ui, self, name, throw, **args)
946
947
947 @filteredpropertycache
948 @filteredpropertycache
948 def _tagscache(self):
949 def _tagscache(self):
949 '''Returns a tagscache object that contains various tags related
950 '''Returns a tagscache object that contains various tags related
950 caches.'''
951 caches.'''
951
952
952 # This simplifies its cache management by having one decorated
953 # This simplifies its cache management by having one decorated
953 # function (this one) and the rest simply fetch things from it.
954 # function (this one) and the rest simply fetch things from it.
954 class tagscache(object):
955 class tagscache(object):
955 def __init__(self):
956 def __init__(self):
956 # These two define the set of tags for this repository. tags
957 # These two define the set of tags for this repository. tags
957 # maps tag name to node; tagtypes maps tag name to 'global' or
958 # maps tag name to node; tagtypes maps tag name to 'global' or
958 # 'local'. (Global tags are defined by .hgtags across all
959 # 'local'. (Global tags are defined by .hgtags across all
959 # heads, and local tags are defined in .hg/localtags.)
960 # heads, and local tags are defined in .hg/localtags.)
960 # They constitute the in-memory cache of tags.
961 # They constitute the in-memory cache of tags.
961 self.tags = self.tagtypes = None
962 self.tags = self.tagtypes = None
962
963
963 self.nodetagscache = self.tagslist = None
964 self.nodetagscache = self.tagslist = None
964
965
965 cache = tagscache()
966 cache = tagscache()
966 cache.tags, cache.tagtypes = self._findtags()
967 cache.tags, cache.tagtypes = self._findtags()
967
968
968 return cache
969 return cache
969
970
970 def tags(self):
971 def tags(self):
971 '''return a mapping of tag to node'''
972 '''return a mapping of tag to node'''
972 t = {}
973 t = {}
973 if self.changelog.filteredrevs:
974 if self.changelog.filteredrevs:
974 tags, tt = self._findtags()
975 tags, tt = self._findtags()
975 else:
976 else:
976 tags = self._tagscache.tags
977 tags = self._tagscache.tags
977 for k, v in tags.iteritems():
978 for k, v in tags.iteritems():
978 try:
979 try:
979 # ignore tags to unknown nodes
980 # ignore tags to unknown nodes
980 self.changelog.rev(v)
981 self.changelog.rev(v)
981 t[k] = v
982 t[k] = v
982 except (error.LookupError, ValueError):
983 except (error.LookupError, ValueError):
983 pass
984 pass
984 return t
985 return t
985
986
986 def _findtags(self):
987 def _findtags(self):
987 '''Do the hard work of finding tags. Return a pair of dicts
988 '''Do the hard work of finding tags. Return a pair of dicts
988 (tags, tagtypes) where tags maps tag name to node, and tagtypes
989 (tags, tagtypes) where tags maps tag name to node, and tagtypes
989 maps tag name to a string like \'global\' or \'local\'.
990 maps tag name to a string like \'global\' or \'local\'.
990 Subclasses or extensions are free to add their own tags, but
991 Subclasses or extensions are free to add their own tags, but
991 should be aware that the returned dicts will be retained for the
992 should be aware that the returned dicts will be retained for the
992 duration of the localrepo object.'''
993 duration of the localrepo object.'''
993
994
994 # XXX what tagtype should subclasses/extensions use? Currently
995 # XXX what tagtype should subclasses/extensions use? Currently
995 # mq and bookmarks add tags, but do not set the tagtype at all.
996 # mq and bookmarks add tags, but do not set the tagtype at all.
996 # Should each extension invent its own tag type? Should there
997 # Should each extension invent its own tag type? Should there
997 # be one tagtype for all such "virtual" tags? Or is the status
998 # be one tagtype for all such "virtual" tags? Or is the status
998 # quo fine?
999 # quo fine?
999
1000
1000
1001
1001 # map tag name to (node, hist)
1002 # map tag name to (node, hist)
1002 alltags = tagsmod.findglobaltags(self.ui, self)
1003 alltags = tagsmod.findglobaltags(self.ui, self)
1003 # map tag name to tag type
1004 # map tag name to tag type
1004 tagtypes = dict((tag, 'global') for tag in alltags)
1005 tagtypes = dict((tag, 'global') for tag in alltags)
1005
1006
1006 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
1007 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
1007
1008
1008 # Build the return dicts. Have to re-encode tag names because
1009 # Build the return dicts. Have to re-encode tag names because
1009 # the tags module always uses UTF-8 (in order not to lose info
1010 # the tags module always uses UTF-8 (in order not to lose info
1010 # writing to the cache), but the rest of Mercurial wants them in
1011 # writing to the cache), but the rest of Mercurial wants them in
1011 # local encoding.
1012 # local encoding.
1012 tags = {}
1013 tags = {}
1013 for (name, (node, hist)) in alltags.iteritems():
1014 for (name, (node, hist)) in alltags.iteritems():
1014 if node != nullid:
1015 if node != nullid:
1015 tags[encoding.tolocal(name)] = node
1016 tags[encoding.tolocal(name)] = node
1016 tags['tip'] = self.changelog.tip()
1017 tags['tip'] = self.changelog.tip()
1017 tagtypes = dict([(encoding.tolocal(name), value)
1018 tagtypes = dict([(encoding.tolocal(name), value)
1018 for (name, value) in tagtypes.iteritems()])
1019 for (name, value) in tagtypes.iteritems()])
1019 return (tags, tagtypes)
1020 return (tags, tagtypes)
1020
1021
1021 def tagtype(self, tagname):
1022 def tagtype(self, tagname):
1022 '''
1023 '''
1023 return the type of the given tag. result can be:
1024 return the type of the given tag. result can be:
1024
1025
1025 'local' : a local tag
1026 'local' : a local tag
1026 'global' : a global tag
1027 'global' : a global tag
1027 None : tag does not exist
1028 None : tag does not exist
1028 '''
1029 '''
1029
1030
1030 return self._tagscache.tagtypes.get(tagname)
1031 return self._tagscache.tagtypes.get(tagname)
1031
1032
1032 def tagslist(self):
1033 def tagslist(self):
1033 '''return a list of tags ordered by revision'''
1034 '''return a list of tags ordered by revision'''
1034 if not self._tagscache.tagslist:
1035 if not self._tagscache.tagslist:
1035 l = []
1036 l = []
1036 for t, n in self.tags().iteritems():
1037 for t, n in self.tags().iteritems():
1037 l.append((self.changelog.rev(n), t, n))
1038 l.append((self.changelog.rev(n), t, n))
1038 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1039 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1039
1040
1040 return self._tagscache.tagslist
1041 return self._tagscache.tagslist
1041
1042
1042 def nodetags(self, node):
1043 def nodetags(self, node):
1043 '''return the tags associated with a node'''
1044 '''return the tags associated with a node'''
1044 if not self._tagscache.nodetagscache:
1045 if not self._tagscache.nodetagscache:
1045 nodetagscache = {}
1046 nodetagscache = {}
1046 for t, n in self._tagscache.tags.iteritems():
1047 for t, n in self._tagscache.tags.iteritems():
1047 nodetagscache.setdefault(n, []).append(t)
1048 nodetagscache.setdefault(n, []).append(t)
1048 for tags in nodetagscache.itervalues():
1049 for tags in nodetagscache.itervalues():
1049 tags.sort()
1050 tags.sort()
1050 self._tagscache.nodetagscache = nodetagscache
1051 self._tagscache.nodetagscache = nodetagscache
1051 return self._tagscache.nodetagscache.get(node, [])
1052 return self._tagscache.nodetagscache.get(node, [])
1052
1053
1053 def nodebookmarks(self, node):
1054 def nodebookmarks(self, node):
1054 """return the list of bookmarks pointing to the specified node"""
1055 """return the list of bookmarks pointing to the specified node"""
1055 return self._bookmarks.names(node)
1056 return self._bookmarks.names(node)
1056
1057
1057 def branchmap(self):
1058 def branchmap(self):
1058 '''returns a dictionary {branch: [branchheads]} with branchheads
1059 '''returns a dictionary {branch: [branchheads]} with branchheads
1059 ordered by increasing revision number'''
1060 ordered by increasing revision number'''
1060 branchmap.updatecache(self)
1061 branchmap.updatecache(self)
1061 return self._branchcaches[self.filtername]
1062 return self._branchcaches[self.filtername]
1062
1063
1063 @unfilteredmethod
1064 @unfilteredmethod
1064 def revbranchcache(self):
1065 def revbranchcache(self):
1065 if not self._revbranchcache:
1066 if not self._revbranchcache:
1066 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1067 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1067 return self._revbranchcache
1068 return self._revbranchcache
1068
1069
1069 def branchtip(self, branch, ignoremissing=False):
1070 def branchtip(self, branch, ignoremissing=False):
1070 '''return the tip node for a given branch
1071 '''return the tip node for a given branch
1071
1072
1072 If ignoremissing is True, then this method will not raise an error.
1073 If ignoremissing is True, then this method will not raise an error.
1073 This is helpful for callers that only expect None for a missing branch
1074 This is helpful for callers that only expect None for a missing branch
1074 (e.g. namespace).
1075 (e.g. namespace).
1075
1076
1076 '''
1077 '''
1077 try:
1078 try:
1078 return self.branchmap().branchtip(branch)
1079 return self.branchmap().branchtip(branch)
1079 except KeyError:
1080 except KeyError:
1080 if not ignoremissing:
1081 if not ignoremissing:
1081 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1082 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1082 else:
1083 else:
1083 pass
1084 pass
1084
1085
1085 def lookup(self, key):
1086 def lookup(self, key):
1086 return scmutil.revsymbol(self, key).node()
1087 return scmutil.revsymbol(self, key).node()
1087
1088
1088 def lookupbranch(self, key):
1089 def lookupbranch(self, key):
1089 if key in self.branchmap():
1090 if key in self.branchmap():
1090 return key
1091 return key
1091
1092
1092 return scmutil.revsymbol(self, key).branch()
1093 return scmutil.revsymbol(self, key).branch()
1093
1094
1094 def known(self, nodes):
1095 def known(self, nodes):
1095 cl = self.changelog
1096 cl = self.changelog
1096 nm = cl.nodemap
1097 nm = cl.nodemap
1097 filtered = cl.filteredrevs
1098 filtered = cl.filteredrevs
1098 result = []
1099 result = []
1099 for n in nodes:
1100 for n in nodes:
1100 r = nm.get(n)
1101 r = nm.get(n)
1101 resp = not (r is None or r in filtered)
1102 resp = not (r is None or r in filtered)
1102 result.append(resp)
1103 result.append(resp)
1103 return result
1104 return result
1104
1105
1105 def local(self):
1106 def local(self):
1106 return self
1107 return self
1107
1108
1108 def publishing(self):
1109 def publishing(self):
1109 # it's safe (and desirable) to trust the publish flag unconditionally
1110 # it's safe (and desirable) to trust the publish flag unconditionally
1110 # so that we don't finalize changes shared between users via ssh or nfs
1111 # so that we don't finalize changes shared between users via ssh or nfs
1111 return self.ui.configbool('phases', 'publish', untrusted=True)
1112 return self.ui.configbool('phases', 'publish', untrusted=True)
1112
1113
1113 def cancopy(self):
1114 def cancopy(self):
1114 # so statichttprepo's override of local() works
1115 # so statichttprepo's override of local() works
1115 if not self.local():
1116 if not self.local():
1116 return False
1117 return False
1117 if not self.publishing():
1118 if not self.publishing():
1118 return True
1119 return True
1119 # if publishing we can't copy if there is filtered content
1120 # if publishing we can't copy if there is filtered content
1120 return not self.filtered('visible').changelog.filteredrevs
1121 return not self.filtered('visible').changelog.filteredrevs
1121
1122
1122 def shared(self):
1123 def shared(self):
1123 '''the type of shared repository (None if not shared)'''
1124 '''the type of shared repository (None if not shared)'''
1124 if self.sharedpath != self.path:
1125 if self.sharedpath != self.path:
1125 return 'store'
1126 return 'store'
1126 return None
1127 return None
1127
1128
1128 def wjoin(self, f, *insidef):
1129 def wjoin(self, f, *insidef):
1129 return self.vfs.reljoin(self.root, f, *insidef)
1130 return self.vfs.reljoin(self.root, f, *insidef)
1130
1131
1131 def file(self, f):
1132 def file(self, f):
1132 if f[0] == '/':
1133 if f[0] == '/':
1133 f = f[1:]
1134 f = f[1:]
1134 return filelog.filelog(self.svfs, f)
1135 return filelog.filelog(self.svfs, f)
1135
1136
1136 def setparents(self, p1, p2=nullid):
1137 def setparents(self, p1, p2=nullid):
1137 with self.dirstate.parentchange():
1138 with self.dirstate.parentchange():
1138 copies = self.dirstate.setparents(p1, p2)
1139 copies = self.dirstate.setparents(p1, p2)
1139 pctx = self[p1]
1140 pctx = self[p1]
1140 if copies:
1141 if copies:
1141 # Adjust copy records, the dirstate cannot do it, it
1142 # Adjust copy records, the dirstate cannot do it, it
1142 # requires access to parents manifests. Preserve them
1143 # requires access to parents manifests. Preserve them
1143 # only for entries added to first parent.
1144 # only for entries added to first parent.
1144 for f in copies:
1145 for f in copies:
1145 if f not in pctx and copies[f] in pctx:
1146 if f not in pctx and copies[f] in pctx:
1146 self.dirstate.copy(copies[f], f)
1147 self.dirstate.copy(copies[f], f)
1147 if p2 == nullid:
1148 if p2 == nullid:
1148 for f, s in sorted(self.dirstate.copies().items()):
1149 for f, s in sorted(self.dirstate.copies().items()):
1149 if f not in pctx and s not in pctx:
1150 if f not in pctx and s not in pctx:
1150 self.dirstate.copy(None, f)
1151 self.dirstate.copy(None, f)
1151
1152
1152 def filectx(self, path, changeid=None, fileid=None, changectx=None):
1153 def filectx(self, path, changeid=None, fileid=None, changectx=None):
1153 """changeid can be a changeset revision, node, or tag.
1154 """changeid can be a changeset revision, node, or tag.
1154 fileid can be a file revision or node."""
1155 fileid can be a file revision or node."""
1155 return context.filectx(self, path, changeid, fileid,
1156 return context.filectx(self, path, changeid, fileid,
1156 changectx=changectx)
1157 changectx=changectx)
1157
1158
1158 def getcwd(self):
1159 def getcwd(self):
1159 return self.dirstate.getcwd()
1160 return self.dirstate.getcwd()
1160
1161
1161 def pathto(self, f, cwd=None):
1162 def pathto(self, f, cwd=None):
1162 return self.dirstate.pathto(f, cwd)
1163 return self.dirstate.pathto(f, cwd)
1163
1164
1164 def _loadfilter(self, filter):
1165 def _loadfilter(self, filter):
1165 if filter not in self._filterpats:
1166 if filter not in self._filterpats:
1166 l = []
1167 l = []
1167 for pat, cmd in self.ui.configitems(filter):
1168 for pat, cmd in self.ui.configitems(filter):
1168 if cmd == '!':
1169 if cmd == '!':
1169 continue
1170 continue
1170 mf = matchmod.match(self.root, '', [pat])
1171 mf = matchmod.match(self.root, '', [pat])
1171 fn = None
1172 fn = None
1172 params = cmd
1173 params = cmd
1173 for name, filterfn in self._datafilters.iteritems():
1174 for name, filterfn in self._datafilters.iteritems():
1174 if cmd.startswith(name):
1175 if cmd.startswith(name):
1175 fn = filterfn
1176 fn = filterfn
1176 params = cmd[len(name):].lstrip()
1177 params = cmd[len(name):].lstrip()
1177 break
1178 break
1178 if not fn:
1179 if not fn:
1179 fn = lambda s, c, **kwargs: procutil.filter(s, c)
1180 fn = lambda s, c, **kwargs: procutil.filter(s, c)
1180 # Wrap old filters not supporting keyword arguments
1181 # Wrap old filters not supporting keyword arguments
1181 if not pycompat.getargspec(fn)[2]:
1182 if not pycompat.getargspec(fn)[2]:
1182 oldfn = fn
1183 oldfn = fn
1183 fn = lambda s, c, **kwargs: oldfn(s, c)
1184 fn = lambda s, c, **kwargs: oldfn(s, c)
1184 l.append((mf, fn, params))
1185 l.append((mf, fn, params))
1185 self._filterpats[filter] = l
1186 self._filterpats[filter] = l
1186 return self._filterpats[filter]
1187 return self._filterpats[filter]
1187
1188
1188 def _filter(self, filterpats, filename, data):
1189 def _filter(self, filterpats, filename, data):
1189 for mf, fn, cmd in filterpats:
1190 for mf, fn, cmd in filterpats:
1190 if mf(filename):
1191 if mf(filename):
1191 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1192 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1192 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1193 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1193 break
1194 break
1194
1195
1195 return data
1196 return data
1196
1197
1197 @unfilteredpropertycache
1198 @unfilteredpropertycache
1198 def _encodefilterpats(self):
1199 def _encodefilterpats(self):
1199 return self._loadfilter('encode')
1200 return self._loadfilter('encode')
1200
1201
1201 @unfilteredpropertycache
1202 @unfilteredpropertycache
1202 def _decodefilterpats(self):
1203 def _decodefilterpats(self):
1203 return self._loadfilter('decode')
1204 return self._loadfilter('decode')
1204
1205
1205 def adddatafilter(self, name, filter):
1206 def adddatafilter(self, name, filter):
1206 self._datafilters[name] = filter
1207 self._datafilters[name] = filter
1207
1208
1208 def wread(self, filename):
1209 def wread(self, filename):
1209 if self.wvfs.islink(filename):
1210 if self.wvfs.islink(filename):
1210 data = self.wvfs.readlink(filename)
1211 data = self.wvfs.readlink(filename)
1211 else:
1212 else:
1212 data = self.wvfs.read(filename)
1213 data = self.wvfs.read(filename)
1213 return self._filter(self._encodefilterpats, filename, data)
1214 return self._filter(self._encodefilterpats, filename, data)
1214
1215
1215 def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
1216 def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
1216 """write ``data`` into ``filename`` in the working directory
1217 """write ``data`` into ``filename`` in the working directory
1217
1218
1218 This returns length of written (maybe decoded) data.
1219 This returns length of written (maybe decoded) data.
1219 """
1220 """
1220 data = self._filter(self._decodefilterpats, filename, data)
1221 data = self._filter(self._decodefilterpats, filename, data)
1221 if 'l' in flags:
1222 if 'l' in flags:
1222 self.wvfs.symlink(data, filename)
1223 self.wvfs.symlink(data, filename)
1223 else:
1224 else:
1224 self.wvfs.write(filename, data, backgroundclose=backgroundclose,
1225 self.wvfs.write(filename, data, backgroundclose=backgroundclose,
1225 **kwargs)
1226 **kwargs)
1226 if 'x' in flags:
1227 if 'x' in flags:
1227 self.wvfs.setflags(filename, False, True)
1228 self.wvfs.setflags(filename, False, True)
1228 else:
1229 else:
1229 self.wvfs.setflags(filename, False, False)
1230 self.wvfs.setflags(filename, False, False)
1230 return len(data)
1231 return len(data)
1231
1232
1232 def wwritedata(self, filename, data):
1233 def wwritedata(self, filename, data):
1233 return self._filter(self._decodefilterpats, filename, data)
1234 return self._filter(self._decodefilterpats, filename, data)
1234
1235
1235 def currenttransaction(self):
1236 def currenttransaction(self):
1236 """return the current transaction or None if non exists"""
1237 """return the current transaction or None if non exists"""
1237 if self._transref:
1238 if self._transref:
1238 tr = self._transref()
1239 tr = self._transref()
1239 else:
1240 else:
1240 tr = None
1241 tr = None
1241
1242
1242 if tr and tr.running():
1243 if tr and tr.running():
1243 return tr
1244 return tr
1244 return None
1245 return None
1245
1246
1246 def transaction(self, desc, report=None):
1247 def transaction(self, desc, report=None):
1247 if (self.ui.configbool('devel', 'all-warnings')
1248 if (self.ui.configbool('devel', 'all-warnings')
1248 or self.ui.configbool('devel', 'check-locks')):
1249 or self.ui.configbool('devel', 'check-locks')):
1249 if self._currentlock(self._lockref) is None:
1250 if self._currentlock(self._lockref) is None:
1250 raise error.ProgrammingError('transaction requires locking')
1251 raise error.ProgrammingError('transaction requires locking')
1251 tr = self.currenttransaction()
1252 tr = self.currenttransaction()
1252 if tr is not None:
1253 if tr is not None:
1253 return tr.nest(name=desc)
1254 return tr.nest(name=desc)
1254
1255
1255 # abort here if the journal already exists
1256 # abort here if the journal already exists
1256 if self.svfs.exists("journal"):
1257 if self.svfs.exists("journal"):
1257 raise error.RepoError(
1258 raise error.RepoError(
1258 _("abandoned transaction found"),
1259 _("abandoned transaction found"),
1259 hint=_("run 'hg recover' to clean up transaction"))
1260 hint=_("run 'hg recover' to clean up transaction"))
1260
1261
1261 idbase = "%.40f#%f" % (random.random(), time.time())
1262 idbase = "%.40f#%f" % (random.random(), time.time())
1262 ha = hex(hashlib.sha1(idbase).digest())
1263 ha = hex(hashlib.sha1(idbase).digest())
1263 txnid = 'TXN:' + ha
1264 txnid = 'TXN:' + ha
1264 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1265 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1265
1266
1266 self._writejournal(desc)
1267 self._writejournal(desc)
1267 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1268 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1268 if report:
1269 if report:
1269 rp = report
1270 rp = report
1270 else:
1271 else:
1271 rp = self.ui.warn
1272 rp = self.ui.warn
1272 vfsmap = {'plain': self.vfs} # root of .hg/
1273 vfsmap = {'plain': self.vfs} # root of .hg/
1273 # we must avoid cyclic reference between repo and transaction.
1274 # we must avoid cyclic reference between repo and transaction.
1274 reporef = weakref.ref(self)
1275 reporef = weakref.ref(self)
1275 # Code to track tag movement
1276 # Code to track tag movement
1276 #
1277 #
1277 # Since tags are all handled as file content, it is actually quite hard
1278 # Since tags are all handled as file content, it is actually quite hard
1278 # to track these movement from a code perspective. So we fallback to a
1279 # to track these movement from a code perspective. So we fallback to a
1279 # tracking at the repository level. One could envision to track changes
1280 # tracking at the repository level. One could envision to track changes
1280 # to the '.hgtags' file through changegroup apply but that fails to
1281 # to the '.hgtags' file through changegroup apply but that fails to
1281 # cope with case where transaction expose new heads without changegroup
1282 # cope with case where transaction expose new heads without changegroup
1282 # being involved (eg: phase movement).
1283 # being involved (eg: phase movement).
1283 #
1284 #
1284 # For now, We gate the feature behind a flag since this likely comes
1285 # For now, We gate the feature behind a flag since this likely comes
1285 # with performance impacts. The current code run more often than needed
1286 # with performance impacts. The current code run more often than needed
1286 # and do not use caches as much as it could. The current focus is on
1287 # and do not use caches as much as it could. The current focus is on
1287 # the behavior of the feature so we disable it by default. The flag
1288 # the behavior of the feature so we disable it by default. The flag
1288 # will be removed when we are happy with the performance impact.
1289 # will be removed when we are happy with the performance impact.
1289 #
1290 #
1290 # Once this feature is no longer experimental move the following
1291 # Once this feature is no longer experimental move the following
1291 # documentation to the appropriate help section:
1292 # documentation to the appropriate help section:
1292 #
1293 #
1293 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1294 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1294 # tags (new or changed or deleted tags). In addition the details of
1295 # tags (new or changed or deleted tags). In addition the details of
1295 # these changes are made available in a file at:
1296 # these changes are made available in a file at:
1296 # ``REPOROOT/.hg/changes/tags.changes``.
1297 # ``REPOROOT/.hg/changes/tags.changes``.
1297 # Make sure you check for HG_TAG_MOVED before reading that file as it
1298 # Make sure you check for HG_TAG_MOVED before reading that file as it
1298 # might exist from a previous transaction even if no tag were touched
1299 # might exist from a previous transaction even if no tag were touched
1299 # in this one. Changes are recorded in a line base format::
1300 # in this one. Changes are recorded in a line base format::
1300 #
1301 #
1301 # <action> <hex-node> <tag-name>\n
1302 # <action> <hex-node> <tag-name>\n
1302 #
1303 #
1303 # Actions are defined as follow:
1304 # Actions are defined as follow:
1304 # "-R": tag is removed,
1305 # "-R": tag is removed,
1305 # "+A": tag is added,
1306 # "+A": tag is added,
1306 # "-M": tag is moved (old value),
1307 # "-M": tag is moved (old value),
1307 # "+M": tag is moved (new value),
1308 # "+M": tag is moved (new value),
1308 tracktags = lambda x: None
1309 tracktags = lambda x: None
1309 # experimental config: experimental.hook-track-tags
1310 # experimental config: experimental.hook-track-tags
1310 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1311 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1311 if desc != 'strip' and shouldtracktags:
1312 if desc != 'strip' and shouldtracktags:
1312 oldheads = self.changelog.headrevs()
1313 oldheads = self.changelog.headrevs()
1313 def tracktags(tr2):
1314 def tracktags(tr2):
1314 repo = reporef()
1315 repo = reporef()
1315 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1316 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1316 newheads = repo.changelog.headrevs()
1317 newheads = repo.changelog.headrevs()
1317 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1318 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1318 # notes: we compare lists here.
1319 # notes: we compare lists here.
1319 # As we do it only once buiding set would not be cheaper
1320 # As we do it only once buiding set would not be cheaper
1320 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1321 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1321 if changes:
1322 if changes:
1322 tr2.hookargs['tag_moved'] = '1'
1323 tr2.hookargs['tag_moved'] = '1'
1323 with repo.vfs('changes/tags.changes', 'w',
1324 with repo.vfs('changes/tags.changes', 'w',
1324 atomictemp=True) as changesfile:
1325 atomictemp=True) as changesfile:
1325 # note: we do not register the file to the transaction
1326 # note: we do not register the file to the transaction
1326 # because we needs it to still exist on the transaction
1327 # because we needs it to still exist on the transaction
1327 # is close (for txnclose hooks)
1328 # is close (for txnclose hooks)
1328 tagsmod.writediff(changesfile, changes)
1329 tagsmod.writediff(changesfile, changes)
1329 def validate(tr2):
1330 def validate(tr2):
1330 """will run pre-closing hooks"""
1331 """will run pre-closing hooks"""
1331 # XXX the transaction API is a bit lacking here so we take a hacky
1332 # XXX the transaction API is a bit lacking here so we take a hacky
1332 # path for now
1333 # path for now
1333 #
1334 #
1334 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1335 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1335 # dict is copied before these run. In addition we needs the data
1336 # dict is copied before these run. In addition we needs the data
1336 # available to in memory hooks too.
1337 # available to in memory hooks too.
1337 #
1338 #
1338 # Moreover, we also need to make sure this runs before txnclose
1339 # Moreover, we also need to make sure this runs before txnclose
1339 # hooks and there is no "pending" mechanism that would execute
1340 # hooks and there is no "pending" mechanism that would execute
1340 # logic only if hooks are about to run.
1341 # logic only if hooks are about to run.
1341 #
1342 #
1342 # Fixing this limitation of the transaction is also needed to track
1343 # Fixing this limitation of the transaction is also needed to track
1343 # other families of changes (bookmarks, phases, obsolescence).
1344 # other families of changes (bookmarks, phases, obsolescence).
1344 #
1345 #
1345 # This will have to be fixed before we remove the experimental
1346 # This will have to be fixed before we remove the experimental
1346 # gating.
1347 # gating.
1347 tracktags(tr2)
1348 tracktags(tr2)
1348 repo = reporef()
1349 repo = reporef()
1349 if repo.ui.configbool('experimental', 'single-head-per-branch'):
1350 if repo.ui.configbool('experimental', 'single-head-per-branch'):
1350 scmutil.enforcesinglehead(repo, tr2, desc)
1351 scmutil.enforcesinglehead(repo, tr2, desc)
1351 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1352 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1352 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1353 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1353 args = tr.hookargs.copy()
1354 args = tr.hookargs.copy()
1354 args.update(bookmarks.preparehookargs(name, old, new))
1355 args.update(bookmarks.preparehookargs(name, old, new))
1355 repo.hook('pretxnclose-bookmark', throw=True,
1356 repo.hook('pretxnclose-bookmark', throw=True,
1356 txnname=desc,
1357 txnname=desc,
1357 **pycompat.strkwargs(args))
1358 **pycompat.strkwargs(args))
1358 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1359 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1359 cl = repo.unfiltered().changelog
1360 cl = repo.unfiltered().changelog
1360 for rev, (old, new) in tr.changes['phases'].items():
1361 for rev, (old, new) in tr.changes['phases'].items():
1361 args = tr.hookargs.copy()
1362 args = tr.hookargs.copy()
1362 node = hex(cl.node(rev))
1363 node = hex(cl.node(rev))
1363 args.update(phases.preparehookargs(node, old, new))
1364 args.update(phases.preparehookargs(node, old, new))
1364 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1365 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1365 **pycompat.strkwargs(args))
1366 **pycompat.strkwargs(args))
1366
1367
1367 repo.hook('pretxnclose', throw=True,
1368 repo.hook('pretxnclose', throw=True,
1368 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1369 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1369 def releasefn(tr, success):
1370 def releasefn(tr, success):
1370 repo = reporef()
1371 repo = reporef()
1371 if success:
1372 if success:
1372 # this should be explicitly invoked here, because
1373 # this should be explicitly invoked here, because
1373 # in-memory changes aren't written out at closing
1374 # in-memory changes aren't written out at closing
1374 # transaction, if tr.addfilegenerator (via
1375 # transaction, if tr.addfilegenerator (via
1375 # dirstate.write or so) isn't invoked while
1376 # dirstate.write or so) isn't invoked while
1376 # transaction running
1377 # transaction running
1377 repo.dirstate.write(None)
1378 repo.dirstate.write(None)
1378 else:
1379 else:
1379 # discard all changes (including ones already written
1380 # discard all changes (including ones already written
1380 # out) in this transaction
1381 # out) in this transaction
1381 narrowspec.restorebackup(self, 'journal.narrowspec')
1382 narrowspec.restorebackup(self, 'journal.narrowspec')
1382 repo.dirstate.restorebackup(None, 'journal.dirstate')
1383 repo.dirstate.restorebackup(None, 'journal.dirstate')
1383
1384
1384 repo.invalidate(clearfilecache=True)
1385 repo.invalidate(clearfilecache=True)
1385
1386
1386 tr = transaction.transaction(rp, self.svfs, vfsmap,
1387 tr = transaction.transaction(rp, self.svfs, vfsmap,
1387 "journal",
1388 "journal",
1388 "undo",
1389 "undo",
1389 aftertrans(renames),
1390 aftertrans(renames),
1390 self.store.createmode,
1391 self.store.createmode,
1391 validator=validate,
1392 validator=validate,
1392 releasefn=releasefn,
1393 releasefn=releasefn,
1393 checkambigfiles=_cachedfiles,
1394 checkambigfiles=_cachedfiles,
1394 name=desc)
1395 name=desc)
1395 tr.changes['revs'] = pycompat.xrange(0, 0)
1396 tr.changes['revs'] = pycompat.xrange(0, 0)
1396 tr.changes['obsmarkers'] = set()
1397 tr.changes['obsmarkers'] = set()
1397 tr.changes['phases'] = {}
1398 tr.changes['phases'] = {}
1398 tr.changes['bookmarks'] = {}
1399 tr.changes['bookmarks'] = {}
1399
1400
1400 tr.hookargs['txnid'] = txnid
1401 tr.hookargs['txnid'] = txnid
1401 # note: writing the fncache only during finalize mean that the file is
1402 # note: writing the fncache only during finalize mean that the file is
1402 # outdated when running hooks. As fncache is used for streaming clone,
1403 # outdated when running hooks. As fncache is used for streaming clone,
1403 # this is not expected to break anything that happen during the hooks.
1404 # this is not expected to break anything that happen during the hooks.
1404 tr.addfinalize('flush-fncache', self.store.write)
1405 tr.addfinalize('flush-fncache', self.store.write)
1405 def txnclosehook(tr2):
1406 def txnclosehook(tr2):
1406 """To be run if transaction is successful, will schedule a hook run
1407 """To be run if transaction is successful, will schedule a hook run
1407 """
1408 """
1408 # Don't reference tr2 in hook() so we don't hold a reference.
1409 # Don't reference tr2 in hook() so we don't hold a reference.
1409 # This reduces memory consumption when there are multiple
1410 # This reduces memory consumption when there are multiple
1410 # transactions per lock. This can likely go away if issue5045
1411 # transactions per lock. This can likely go away if issue5045
1411 # fixes the function accumulation.
1412 # fixes the function accumulation.
1412 hookargs = tr2.hookargs
1413 hookargs = tr2.hookargs
1413
1414
1414 def hookfunc():
1415 def hookfunc():
1415 repo = reporef()
1416 repo = reporef()
1416 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1417 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1417 bmchanges = sorted(tr.changes['bookmarks'].items())
1418 bmchanges = sorted(tr.changes['bookmarks'].items())
1418 for name, (old, new) in bmchanges:
1419 for name, (old, new) in bmchanges:
1419 args = tr.hookargs.copy()
1420 args = tr.hookargs.copy()
1420 args.update(bookmarks.preparehookargs(name, old, new))
1421 args.update(bookmarks.preparehookargs(name, old, new))
1421 repo.hook('txnclose-bookmark', throw=False,
1422 repo.hook('txnclose-bookmark', throw=False,
1422 txnname=desc, **pycompat.strkwargs(args))
1423 txnname=desc, **pycompat.strkwargs(args))
1423
1424
1424 if hook.hashook(repo.ui, 'txnclose-phase'):
1425 if hook.hashook(repo.ui, 'txnclose-phase'):
1425 cl = repo.unfiltered().changelog
1426 cl = repo.unfiltered().changelog
1426 phasemv = sorted(tr.changes['phases'].items())
1427 phasemv = sorted(tr.changes['phases'].items())
1427 for rev, (old, new) in phasemv:
1428 for rev, (old, new) in phasemv:
1428 args = tr.hookargs.copy()
1429 args = tr.hookargs.copy()
1429 node = hex(cl.node(rev))
1430 node = hex(cl.node(rev))
1430 args.update(phases.preparehookargs(node, old, new))
1431 args.update(phases.preparehookargs(node, old, new))
1431 repo.hook('txnclose-phase', throw=False, txnname=desc,
1432 repo.hook('txnclose-phase', throw=False, txnname=desc,
1432 **pycompat.strkwargs(args))
1433 **pycompat.strkwargs(args))
1433
1434
1434 repo.hook('txnclose', throw=False, txnname=desc,
1435 repo.hook('txnclose', throw=False, txnname=desc,
1435 **pycompat.strkwargs(hookargs))
1436 **pycompat.strkwargs(hookargs))
1436 reporef()._afterlock(hookfunc)
1437 reporef()._afterlock(hookfunc)
1437 tr.addfinalize('txnclose-hook', txnclosehook)
1438 tr.addfinalize('txnclose-hook', txnclosehook)
1438 # Include a leading "-" to make it happen before the transaction summary
1439 # Include a leading "-" to make it happen before the transaction summary
1439 # reports registered via scmutil.registersummarycallback() whose names
1440 # reports registered via scmutil.registersummarycallback() whose names
1440 # are 00-txnreport etc. That way, the caches will be warm when the
1441 # are 00-txnreport etc. That way, the caches will be warm when the
1441 # callbacks run.
1442 # callbacks run.
1442 tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
1443 tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
1443 def txnaborthook(tr2):
1444 def txnaborthook(tr2):
1444 """To be run if transaction is aborted
1445 """To be run if transaction is aborted
1445 """
1446 """
1446 reporef().hook('txnabort', throw=False, txnname=desc,
1447 reporef().hook('txnabort', throw=False, txnname=desc,
1447 **pycompat.strkwargs(tr2.hookargs))
1448 **pycompat.strkwargs(tr2.hookargs))
1448 tr.addabort('txnabort-hook', txnaborthook)
1449 tr.addabort('txnabort-hook', txnaborthook)
1449 # avoid eager cache invalidation. in-memory data should be identical
1450 # avoid eager cache invalidation. in-memory data should be identical
1450 # to stored data if transaction has no error.
1451 # to stored data if transaction has no error.
1451 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1452 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1452 self._transref = weakref.ref(tr)
1453 self._transref = weakref.ref(tr)
1453 scmutil.registersummarycallback(self, tr, desc)
1454 scmutil.registersummarycallback(self, tr, desc)
1454 return tr
1455 return tr
1455
1456
1456 def _journalfiles(self):
1457 def _journalfiles(self):
1457 return ((self.svfs, 'journal'),
1458 return ((self.svfs, 'journal'),
1458 (self.vfs, 'journal.dirstate'),
1459 (self.vfs, 'journal.dirstate'),
1459 (self.vfs, 'journal.branch'),
1460 (self.vfs, 'journal.branch'),
1460 (self.vfs, 'journal.desc'),
1461 (self.vfs, 'journal.desc'),
1461 (self.vfs, 'journal.bookmarks'),
1462 (self.vfs, 'journal.bookmarks'),
1462 (self.svfs, 'journal.phaseroots'))
1463 (self.svfs, 'journal.phaseroots'))
1463
1464
1464 def undofiles(self):
1465 def undofiles(self):
1465 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1466 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1466
1467
1467 @unfilteredmethod
1468 @unfilteredmethod
1468 def _writejournal(self, desc):
1469 def _writejournal(self, desc):
1469 self.dirstate.savebackup(None, 'journal.dirstate')
1470 self.dirstate.savebackup(None, 'journal.dirstate')
1470 narrowspec.savebackup(self, 'journal.narrowspec')
1471 narrowspec.savebackup(self, 'journal.narrowspec')
1471 self.vfs.write("journal.branch",
1472 self.vfs.write("journal.branch",
1472 encoding.fromlocal(self.dirstate.branch()))
1473 encoding.fromlocal(self.dirstate.branch()))
1473 self.vfs.write("journal.desc",
1474 self.vfs.write("journal.desc",
1474 "%d\n%s\n" % (len(self), desc))
1475 "%d\n%s\n" % (len(self), desc))
1475 self.vfs.write("journal.bookmarks",
1476 self.vfs.write("journal.bookmarks",
1476 self.vfs.tryread("bookmarks"))
1477 self.vfs.tryread("bookmarks"))
1477 self.svfs.write("journal.phaseroots",
1478 self.svfs.write("journal.phaseroots",
1478 self.svfs.tryread("phaseroots"))
1479 self.svfs.tryread("phaseroots"))
1479
1480
1480 def recover(self):
1481 def recover(self):
1481 with self.lock():
1482 with self.lock():
1482 if self.svfs.exists("journal"):
1483 if self.svfs.exists("journal"):
1483 self.ui.status(_("rolling back interrupted transaction\n"))
1484 self.ui.status(_("rolling back interrupted transaction\n"))
1484 vfsmap = {'': self.svfs,
1485 vfsmap = {'': self.svfs,
1485 'plain': self.vfs,}
1486 'plain': self.vfs,}
1486 transaction.rollback(self.svfs, vfsmap, "journal",
1487 transaction.rollback(self.svfs, vfsmap, "journal",
1487 self.ui.warn,
1488 self.ui.warn,
1488 checkambigfiles=_cachedfiles)
1489 checkambigfiles=_cachedfiles)
1489 self.invalidate()
1490 self.invalidate()
1490 return True
1491 return True
1491 else:
1492 else:
1492 self.ui.warn(_("no interrupted transaction available\n"))
1493 self.ui.warn(_("no interrupted transaction available\n"))
1493 return False
1494 return False
1494
1495
1495 def rollback(self, dryrun=False, force=False):
1496 def rollback(self, dryrun=False, force=False):
1496 wlock = lock = dsguard = None
1497 wlock = lock = dsguard = None
1497 try:
1498 try:
1498 wlock = self.wlock()
1499 wlock = self.wlock()
1499 lock = self.lock()
1500 lock = self.lock()
1500 if self.svfs.exists("undo"):
1501 if self.svfs.exists("undo"):
1501 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1502 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1502
1503
1503 return self._rollback(dryrun, force, dsguard)
1504 return self._rollback(dryrun, force, dsguard)
1504 else:
1505 else:
1505 self.ui.warn(_("no rollback information available\n"))
1506 self.ui.warn(_("no rollback information available\n"))
1506 return 1
1507 return 1
1507 finally:
1508 finally:
1508 release(dsguard, lock, wlock)
1509 release(dsguard, lock, wlock)
1509
1510
1510 @unfilteredmethod # Until we get smarter cache management
1511 @unfilteredmethod # Until we get smarter cache management
1511 def _rollback(self, dryrun, force, dsguard):
1512 def _rollback(self, dryrun, force, dsguard):
1512 ui = self.ui
1513 ui = self.ui
1513 try:
1514 try:
1514 args = self.vfs.read('undo.desc').splitlines()
1515 args = self.vfs.read('undo.desc').splitlines()
1515 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1516 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1516 if len(args) >= 3:
1517 if len(args) >= 3:
1517 detail = args[2]
1518 detail = args[2]
1518 oldtip = oldlen - 1
1519 oldtip = oldlen - 1
1519
1520
1520 if detail and ui.verbose:
1521 if detail and ui.verbose:
1521 msg = (_('repository tip rolled back to revision %d'
1522 msg = (_('repository tip rolled back to revision %d'
1522 ' (undo %s: %s)\n')
1523 ' (undo %s: %s)\n')
1523 % (oldtip, desc, detail))
1524 % (oldtip, desc, detail))
1524 else:
1525 else:
1525 msg = (_('repository tip rolled back to revision %d'
1526 msg = (_('repository tip rolled back to revision %d'
1526 ' (undo %s)\n')
1527 ' (undo %s)\n')
1527 % (oldtip, desc))
1528 % (oldtip, desc))
1528 except IOError:
1529 except IOError:
1529 msg = _('rolling back unknown transaction\n')
1530 msg = _('rolling back unknown transaction\n')
1530 desc = None
1531 desc = None
1531
1532
1532 if not force and self['.'] != self['tip'] and desc == 'commit':
1533 if not force and self['.'] != self['tip'] and desc == 'commit':
1533 raise error.Abort(
1534 raise error.Abort(
1534 _('rollback of last commit while not checked out '
1535 _('rollback of last commit while not checked out '
1535 'may lose data'), hint=_('use -f to force'))
1536 'may lose data'), hint=_('use -f to force'))
1536
1537
1537 ui.status(msg)
1538 ui.status(msg)
1538 if dryrun:
1539 if dryrun:
1539 return 0
1540 return 0
1540
1541
1541 parents = self.dirstate.parents()
1542 parents = self.dirstate.parents()
1542 self.destroying()
1543 self.destroying()
1543 vfsmap = {'plain': self.vfs, '': self.svfs}
1544 vfsmap = {'plain': self.vfs, '': self.svfs}
1544 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1545 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1545 checkambigfiles=_cachedfiles)
1546 checkambigfiles=_cachedfiles)
1546 if self.vfs.exists('undo.bookmarks'):
1547 if self.vfs.exists('undo.bookmarks'):
1547 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1548 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1548 if self.svfs.exists('undo.phaseroots'):
1549 if self.svfs.exists('undo.phaseroots'):
1549 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1550 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1550 self.invalidate()
1551 self.invalidate()
1551
1552
1552 parentgone = (parents[0] not in self.changelog.nodemap or
1553 parentgone = (parents[0] not in self.changelog.nodemap or
1553 parents[1] not in self.changelog.nodemap)
1554 parents[1] not in self.changelog.nodemap)
1554 if parentgone:
1555 if parentgone:
1555 # prevent dirstateguard from overwriting already restored one
1556 # prevent dirstateguard from overwriting already restored one
1556 dsguard.close()
1557 dsguard.close()
1557
1558
1558 narrowspec.restorebackup(self, 'undo.narrowspec')
1559 narrowspec.restorebackup(self, 'undo.narrowspec')
1559 self.dirstate.restorebackup(None, 'undo.dirstate')
1560 self.dirstate.restorebackup(None, 'undo.dirstate')
1560 try:
1561 try:
1561 branch = self.vfs.read('undo.branch')
1562 branch = self.vfs.read('undo.branch')
1562 self.dirstate.setbranch(encoding.tolocal(branch))
1563 self.dirstate.setbranch(encoding.tolocal(branch))
1563 except IOError:
1564 except IOError:
1564 ui.warn(_('named branch could not be reset: '
1565 ui.warn(_('named branch could not be reset: '
1565 'current branch is still \'%s\'\n')
1566 'current branch is still \'%s\'\n')
1566 % self.dirstate.branch())
1567 % self.dirstate.branch())
1567
1568
1568 parents = tuple([p.rev() for p in self[None].parents()])
1569 parents = tuple([p.rev() for p in self[None].parents()])
1569 if len(parents) > 1:
1570 if len(parents) > 1:
1570 ui.status(_('working directory now based on '
1571 ui.status(_('working directory now based on '
1571 'revisions %d and %d\n') % parents)
1572 'revisions %d and %d\n') % parents)
1572 else:
1573 else:
1573 ui.status(_('working directory now based on '
1574 ui.status(_('working directory now based on '
1574 'revision %d\n') % parents)
1575 'revision %d\n') % parents)
1575 mergemod.mergestate.clean(self, self['.'].node())
1576 mergemod.mergestate.clean(self, self['.'].node())
1576
1577
1577 # TODO: if we know which new heads may result from this rollback, pass
1578 # TODO: if we know which new heads may result from this rollback, pass
1578 # them to destroy(), which will prevent the branchhead cache from being
1579 # them to destroy(), which will prevent the branchhead cache from being
1579 # invalidated.
1580 # invalidated.
1580 self.destroyed()
1581 self.destroyed()
1581 return 0
1582 return 0
1582
1583
1583 def _buildcacheupdater(self, newtransaction):
1584 def _buildcacheupdater(self, newtransaction):
1584 """called during transaction to build the callback updating cache
1585 """called during transaction to build the callback updating cache
1585
1586
1586 Lives on the repository to help extension who might want to augment
1587 Lives on the repository to help extension who might want to augment
1587 this logic. For this purpose, the created transaction is passed to the
1588 this logic. For this purpose, the created transaction is passed to the
1588 method.
1589 method.
1589 """
1590 """
1590 # we must avoid cyclic reference between repo and transaction.
1591 # we must avoid cyclic reference between repo and transaction.
1591 reporef = weakref.ref(self)
1592 reporef = weakref.ref(self)
1592 def updater(tr):
1593 def updater(tr):
1593 repo = reporef()
1594 repo = reporef()
1594 repo.updatecaches(tr)
1595 repo.updatecaches(tr)
1595 return updater
1596 return updater
1596
1597
1597 @unfilteredmethod
1598 @unfilteredmethod
1598 def updatecaches(self, tr=None, full=False):
1599 def updatecaches(self, tr=None, full=False):
1599 """warm appropriate caches
1600 """warm appropriate caches
1600
1601
1601 If this function is called after a transaction closed. The transaction
1602 If this function is called after a transaction closed. The transaction
1602 will be available in the 'tr' argument. This can be used to selectively
1603 will be available in the 'tr' argument. This can be used to selectively
1603 update caches relevant to the changes in that transaction.
1604 update caches relevant to the changes in that transaction.
1604
1605
1605 If 'full' is set, make sure all caches the function knows about have
1606 If 'full' is set, make sure all caches the function knows about have
1606 up-to-date data. Even the ones usually loaded more lazily.
1607 up-to-date data. Even the ones usually loaded more lazily.
1607 """
1608 """
1608 if tr is not None and tr.hookargs.get('source') == 'strip':
1609 if tr is not None and tr.hookargs.get('source') == 'strip':
1609 # During strip, many caches are invalid but
1610 # During strip, many caches are invalid but
1610 # later call to `destroyed` will refresh them.
1611 # later call to `destroyed` will refresh them.
1611 return
1612 return
1612
1613
1613 if tr is None or tr.changes['revs']:
1614 if tr is None or tr.changes['revs']:
1614 # updating the unfiltered branchmap should refresh all the others,
1615 # updating the unfiltered branchmap should refresh all the others,
1615 self.ui.debug('updating the branch cache\n')
1616 self.ui.debug('updating the branch cache\n')
1616 branchmap.updatecache(self.filtered('served'))
1617 branchmap.updatecache(self.filtered('served'))
1617
1618
1618 if full:
1619 if full:
1619 rbc = self.revbranchcache()
1620 rbc = self.revbranchcache()
1620 for r in self.changelog:
1621 for r in self.changelog:
1621 rbc.branchinfo(r)
1622 rbc.branchinfo(r)
1622 rbc.write()
1623 rbc.write()
1623
1624
1624 # ensure the working copy parents are in the manifestfulltextcache
1625 # ensure the working copy parents are in the manifestfulltextcache
1625 for ctx in self['.'].parents():
1626 for ctx in self['.'].parents():
1626 ctx.manifest() # accessing the manifest is enough
1627 ctx.manifest() # accessing the manifest is enough
1627
1628
1628 def invalidatecaches(self):
1629 def invalidatecaches(self):
1629
1630
1630 if '_tagscache' in vars(self):
1631 if '_tagscache' in vars(self):
1631 # can't use delattr on proxy
1632 # can't use delattr on proxy
1632 del self.__dict__['_tagscache']
1633 del self.__dict__['_tagscache']
1633
1634
1634 self.unfiltered()._branchcaches.clear()
1635 self.unfiltered()._branchcaches.clear()
1635 self.invalidatevolatilesets()
1636 self.invalidatevolatilesets()
1636 self._sparsesignaturecache.clear()
1637 self._sparsesignaturecache.clear()
1637
1638
1638 def invalidatevolatilesets(self):
1639 def invalidatevolatilesets(self):
1639 self.filteredrevcache.clear()
1640 self.filteredrevcache.clear()
1640 obsolete.clearobscaches(self)
1641 obsolete.clearobscaches(self)
1641
1642
1642 def invalidatedirstate(self):
1643 def invalidatedirstate(self):
1643 '''Invalidates the dirstate, causing the next call to dirstate
1644 '''Invalidates the dirstate, causing the next call to dirstate
1644 to check if it was modified since the last time it was read,
1645 to check if it was modified since the last time it was read,
1645 rereading it if it has.
1646 rereading it if it has.
1646
1647
1647 This is different to dirstate.invalidate() that it doesn't always
1648 This is different to dirstate.invalidate() that it doesn't always
1648 rereads the dirstate. Use dirstate.invalidate() if you want to
1649 rereads the dirstate. Use dirstate.invalidate() if you want to
1649 explicitly read the dirstate again (i.e. restoring it to a previous
1650 explicitly read the dirstate again (i.e. restoring it to a previous
1650 known good state).'''
1651 known good state).'''
1651 if hasunfilteredcache(self, 'dirstate'):
1652 if hasunfilteredcache(self, 'dirstate'):
1652 for k in self.dirstate._filecache:
1653 for k in self.dirstate._filecache:
1653 try:
1654 try:
1654 delattr(self.dirstate, k)
1655 delattr(self.dirstate, k)
1655 except AttributeError:
1656 except AttributeError:
1656 pass
1657 pass
1657 delattr(self.unfiltered(), 'dirstate')
1658 delattr(self.unfiltered(), 'dirstate')
1658
1659
1659 def invalidate(self, clearfilecache=False):
1660 def invalidate(self, clearfilecache=False):
1660 '''Invalidates both store and non-store parts other than dirstate
1661 '''Invalidates both store and non-store parts other than dirstate
1661
1662
1662 If a transaction is running, invalidation of store is omitted,
1663 If a transaction is running, invalidation of store is omitted,
1663 because discarding in-memory changes might cause inconsistency
1664 because discarding in-memory changes might cause inconsistency
1664 (e.g. incomplete fncache causes unintentional failure, but
1665 (e.g. incomplete fncache causes unintentional failure, but
1665 redundant one doesn't).
1666 redundant one doesn't).
1666 '''
1667 '''
1667 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1668 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1668 for k in list(self._filecache.keys()):
1669 for k in list(self._filecache.keys()):
1669 # dirstate is invalidated separately in invalidatedirstate()
1670 # dirstate is invalidated separately in invalidatedirstate()
1670 if k == 'dirstate':
1671 if k == 'dirstate':
1671 continue
1672 continue
1672 if (k == 'changelog' and
1673 if (k == 'changelog' and
1673 self.currenttransaction() and
1674 self.currenttransaction() and
1674 self.changelog._delayed):
1675 self.changelog._delayed):
1675 # The changelog object may store unwritten revisions. We don't
1676 # The changelog object may store unwritten revisions. We don't
1676 # want to lose them.
1677 # want to lose them.
1677 # TODO: Solve the problem instead of working around it.
1678 # TODO: Solve the problem instead of working around it.
1678 continue
1679 continue
1679
1680
1680 if clearfilecache:
1681 if clearfilecache:
1681 del self._filecache[k]
1682 del self._filecache[k]
1682 try:
1683 try:
1683 delattr(unfiltered, k)
1684 delattr(unfiltered, k)
1684 except AttributeError:
1685 except AttributeError:
1685 pass
1686 pass
1686 self.invalidatecaches()
1687 self.invalidatecaches()
1687 if not self.currenttransaction():
1688 if not self.currenttransaction():
1688 # TODO: Changing contents of store outside transaction
1689 # TODO: Changing contents of store outside transaction
1689 # causes inconsistency. We should make in-memory store
1690 # causes inconsistency. We should make in-memory store
1690 # changes detectable, and abort if changed.
1691 # changes detectable, and abort if changed.
1691 self.store.invalidatecaches()
1692 self.store.invalidatecaches()
1692
1693
1693 def invalidateall(self):
1694 def invalidateall(self):
1694 '''Fully invalidates both store and non-store parts, causing the
1695 '''Fully invalidates both store and non-store parts, causing the
1695 subsequent operation to reread any outside changes.'''
1696 subsequent operation to reread any outside changes.'''
1696 # extension should hook this to invalidate its caches
1697 # extension should hook this to invalidate its caches
1697 self.invalidate()
1698 self.invalidate()
1698 self.invalidatedirstate()
1699 self.invalidatedirstate()
1699
1700
1700 @unfilteredmethod
1701 @unfilteredmethod
1701 def _refreshfilecachestats(self, tr):
1702 def _refreshfilecachestats(self, tr):
1702 """Reload stats of cached files so that they are flagged as valid"""
1703 """Reload stats of cached files so that they are flagged as valid"""
1703 for k, ce in self._filecache.items():
1704 for k, ce in self._filecache.items():
1704 k = pycompat.sysstr(k)
1705 k = pycompat.sysstr(k)
1705 if k == r'dirstate' or k not in self.__dict__:
1706 if k == r'dirstate' or k not in self.__dict__:
1706 continue
1707 continue
1707 ce.refresh()
1708 ce.refresh()
1708
1709
1709 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1710 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1710 inheritchecker=None, parentenvvar=None):
1711 inheritchecker=None, parentenvvar=None):
1711 parentlock = None
1712 parentlock = None
1712 # the contents of parentenvvar are used by the underlying lock to
1713 # the contents of parentenvvar are used by the underlying lock to
1713 # determine whether it can be inherited
1714 # determine whether it can be inherited
1714 if parentenvvar is not None:
1715 if parentenvvar is not None:
1715 parentlock = encoding.environ.get(parentenvvar)
1716 parentlock = encoding.environ.get(parentenvvar)
1716
1717
1717 timeout = 0
1718 timeout = 0
1718 warntimeout = 0
1719 warntimeout = 0
1719 if wait:
1720 if wait:
1720 timeout = self.ui.configint("ui", "timeout")
1721 timeout = self.ui.configint("ui", "timeout")
1721 warntimeout = self.ui.configint("ui", "timeout.warn")
1722 warntimeout = self.ui.configint("ui", "timeout.warn")
1722 # internal config: ui.signal-safe-lock
1723 # internal config: ui.signal-safe-lock
1723 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
1724 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
1724
1725
1725 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
1726 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
1726 releasefn=releasefn,
1727 releasefn=releasefn,
1727 acquirefn=acquirefn, desc=desc,
1728 acquirefn=acquirefn, desc=desc,
1728 inheritchecker=inheritchecker,
1729 inheritchecker=inheritchecker,
1729 parentlock=parentlock,
1730 parentlock=parentlock,
1730 signalsafe=signalsafe)
1731 signalsafe=signalsafe)
1731 return l
1732 return l
1732
1733
1733 def _afterlock(self, callback):
1734 def _afterlock(self, callback):
1734 """add a callback to be run when the repository is fully unlocked
1735 """add a callback to be run when the repository is fully unlocked
1735
1736
1736 The callback will be executed when the outermost lock is released
1737 The callback will be executed when the outermost lock is released
1737 (with wlock being higher level than 'lock')."""
1738 (with wlock being higher level than 'lock')."""
1738 for ref in (self._wlockref, self._lockref):
1739 for ref in (self._wlockref, self._lockref):
1739 l = ref and ref()
1740 l = ref and ref()
1740 if l and l.held:
1741 if l and l.held:
1741 l.postrelease.append(callback)
1742 l.postrelease.append(callback)
1742 break
1743 break
1743 else: # no lock have been found.
1744 else: # no lock have been found.
1744 callback()
1745 callback()
1745
1746
1746 def lock(self, wait=True):
1747 def lock(self, wait=True):
1747 '''Lock the repository store (.hg/store) and return a weak reference
1748 '''Lock the repository store (.hg/store) and return a weak reference
1748 to the lock. Use this before modifying the store (e.g. committing or
1749 to the lock. Use this before modifying the store (e.g. committing or
1749 stripping). If you are opening a transaction, get a lock as well.)
1750 stripping). If you are opening a transaction, get a lock as well.)
1750
1751
1751 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1752 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1752 'wlock' first to avoid a dead-lock hazard.'''
1753 'wlock' first to avoid a dead-lock hazard.'''
1753 l = self._currentlock(self._lockref)
1754 l = self._currentlock(self._lockref)
1754 if l is not None:
1755 if l is not None:
1755 l.lock()
1756 l.lock()
1756 return l
1757 return l
1757
1758
1758 l = self._lock(self.svfs, "lock", wait, None,
1759 l = self._lock(self.svfs, "lock", wait, None,
1759 self.invalidate, _('repository %s') % self.origroot)
1760 self.invalidate, _('repository %s') % self.origroot)
1760 self._lockref = weakref.ref(l)
1761 self._lockref = weakref.ref(l)
1761 return l
1762 return l
1762
1763
1763 def _wlockchecktransaction(self):
1764 def _wlockchecktransaction(self):
1764 if self.currenttransaction() is not None:
1765 if self.currenttransaction() is not None:
1765 raise error.LockInheritanceContractViolation(
1766 raise error.LockInheritanceContractViolation(
1766 'wlock cannot be inherited in the middle of a transaction')
1767 'wlock cannot be inherited in the middle of a transaction')
1767
1768
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.

    Use this before modifying files in .hg.

    If both 'lock' and 'wlock' must be acquired, ensure you always acquires
    'wlock' first to avoid a dead-lock hazard.'''
    # Fast path: reuse the existing lock if our weak reference is
    # still alive and the lock is still held.
    current = self._wlockref and self._wlockref()
    if current is not None and current.held:
        current.lock()
        return current

    # We do not need to check for non-waiting lock acquisition. Such
    # acquisition would not cause dead-lock as they would just fail.
    if wait and (self.ui.configbool('devel', 'all-warnings')
                 or self.ui.configbool('devel', 'check-locks')):
        if self._currentlock(self._lockref) is not None:
            self.ui.develwarn('"wlock" acquired after "lock"')

    def unlock():
        # Flush the dirstate on release, unless a parent change is
        # pending, in which case discard the in-memory state instead.
        if self.dirstate.pendingparentchange():
            self.dirstate.invalidate()
        else:
            self.dirstate.write(None)

        self._filecache['dirstate'].refresh()

    newlock = self._lock(self.vfs, "wlock", wait, unlock,
                         self.invalidatedirstate,
                         _('working directory of %s') % self.origroot,
                         inheritchecker=self._wlockchecktransaction,
                         parentenvvar='HG_WLOCK_LOCKER')
    self._wlockref = weakref.ref(newlock)
    return newlock
1803
1804
1804 def _currentlock(self, lockref):
1805 def _currentlock(self, lockref):
1805 """Returns the lock if it's held, or None if it's not."""
1806 """Returns the lock if it's held, or None if it's not."""
1806 if lockref is None:
1807 if lockref is None:
1807 return None
1808 return None
1808 l = lockref()
1809 l = lockref()
1809 if l is None or not l.held:
1810 if l is None or not l.held:
1810 return None
1811 return None
1811 return l
1812 return l
1812
1813
def currentwlock(self):
    """Returns the wlock if it's held, or None if it's not."""
    wlockref = self._wlockref
    return self._currentlock(wlockref)
1816
1817
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    fctx: the file context being committed
    manifest1, manifest2: manifests of the commit's first and second
        parents (manifest2 is empty/falsy outside of merges)
    linkrev: the changelog revision this file revision will link to
    tr: the active transaction
    changelist: list mutated in place -- fname is appended when the
        file is considered changed by this commit

    Returns the filelog node for this file in the new commit (either a
    freshly added node, or a reused parent node when nothing changed).
    """

    fname = fctx.path()
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = manifest2.get(fname, nullid)
    if isinstance(fctx, context.filectx):
        # A real filectx already has a filelog node; reuse it when it
        # matches one of the parents instead of re-adding the data.
        node = fctx.filenode()
        if node in [fparent1, fparent2]:
            self.ui.debug('reusing %s filelog entry\n' % fname)
            if manifest1.flags(fname) != fctx.flags():
                changelist.append(fname)
            return node

    flog = self.file(fname)
    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file. This copy data will effectively act as a parent
        # of this new revision. If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent. For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                 should record that bar descends from
        #                 bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4   as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # Here, we used to search backwards through history to try to find
        # where the file copy came from if the source of a copy was not in
        # the parent directory. However, this doesn't actually make sense to
        # do (what does a copy from something not in your working copy even
        # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
        # the user that copy information was dropped, so if they didn't
        # expect this outcome it can be fixed, but this is the correct
        # behavior in this circumstance.

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent1 == nullid:
        fparent1, fparent2 = fparent2, nullid
    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
        if fparent1 in fparentancestors:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 in fparentancestors:
            fparent2 = nullid

    # is the file changed?
    text = fctx.data()
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
    # are just the flags changed during merge?
    elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1904
1905
def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
    """check for commit arguments that aren't committable"""
    # Only explicit patterns (exact files or path prefixes) need
    # validation; a broad matcher cannot name a bad file.
    if not (match.isexact() or match.prefix()):
        return
    matched = set(status.modified + status.added + status.removed)

    for f in match.files():
        f = self.dirstate.normalize(f)
        if f == '.' or f in matched or f in wctx.substate:
            continue
        if f in status.deleted:
            fail(f, _('file not found!'))
        if f in vdirs: # visited directory
            d = f + '/'
            # a named directory must contain at least one matched file
            if not any(mf.startswith(d) for mf in matched):
                fail(f, _("no match under directory!"))
        elif f not in self.dirstate:
            fail(f, _("file not tracked!"))
1925
1926
@unfilteredmethod
def commit(self, text="", user=None, date=None, match=None, force=False,
           editor=False, extra=None):
    """Add a new revision to current repository.

    Revision information is gathered from the working directory,
    match can be used to filter the committed files. If editor is
    supplied, it is called to get a commit message.

    Returns the node of the new changeset, or None when there is
    nothing to commit (and empty commits are not allowed).
    """
    if extra is None:
        extra = {}

    def fail(f, msg):
        raise error.Abort('%s: %s' % (f, msg))

    if not match:
        match = matchmod.always(self.root, '')

    if not force:
        # record explicitly-named directories so checkcommitpatterns
        # can validate them later
        vdirs = []
        match.explicitdir = vdirs.append
        match.bad = fail

    wlock = lock = tr = None
    try:
        wlock = self.wlock()
        lock = self.lock() # for recent changelog (see issue4368)

        wctx = self[None]
        merge = len(wctx.parents()) > 1

        if not force and merge and not match.always():
            raise error.Abort(_('cannot partially commit a merge '
                                '(do not specify files or patterns)'))

        status = self.status(match=match, clean=force)
        if force:
            status.modified.extend(status.clean) # mq may commit clean files

        # check subrepos
        subs, commitsubs, newstate = subrepoutil.precommit(
            self.ui, wctx, status, match, force=force)

        # make sure all explicit patterns are matched
        if not force:
            self.checkcommitpatterns(wctx, vdirs, match, status, fail)

        cctx = context.workingcommitctx(self, status,
                                        text, user, date, extra)

        # internal config: ui.allowemptycommit
        allowemptycommit = (wctx.branch() != wctx.p1().branch()
                            or extra.get('close') or merge or cctx.files()
                            or self.ui.configbool('ui', 'allowemptycommit'))
        if not allowemptycommit:
            return None

        if merge and cctx.deleted():
            raise error.Abort(_("cannot commit merge with missing files"))

        ms = mergemod.mergestate.read(self)
        mergeutil.checkunresolved(ms)

        if editor:
            cctx._text = editor(self, cctx, subs)
        edited = (text != cctx._text)

        # Save commit message in case this transaction gets rolled back
        # (e.g. by a pretxncommit hook). Leave the content alone on
        # the assumption that the user will use the same editor again.
        msgfn = self.savecommitmessage(cctx._text)

        # commit subs and write new state
        if subs:
            for s in sorted(commitsubs):
                sub = wctx.sub(s)
                self.ui.status(_('committing subrepository %s\n') %
                               subrepoutil.subrelpath(sub))
                sr = sub.commit(cctx._text, user, date)
                newstate[s] = (newstate[s][0], sr)
            subrepoutil.writestate(self, newstate)

        p1, p2 = self.dirstate.parents()
        hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
        try:
            self.hook("precommit", throw=True, parent1=hookp1,
                      parent2=hookp2)
            tr = self.transaction('commit')
            ret = self.commitctx(cctx, True)
        except: # re-raises
            if edited:
                # point the user at the saved message before re-raising
                self.ui.write(
                    _('note: commit message saved in %s\n') % msgfn)
            raise
        # update bookmarks, dirstate and mergestate
        bookmarks.update(self, [p1, p2], ret)
        cctx.markcommitted(ret)
        ms.reset()
        tr.close()

    finally:
        lockmod.release(tr, lock, wlock)

    def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
        # hack for command that use a temporary commit (eg: histedit)
        # temporary commit got stripped before hook release
        if self.changelog.hasnode(ret):
            self.hook("commit", node=node, parent1=parent1,
                      parent2=parent2)
    self._afterlock(commithook)
    return ret
2037
2038
@unfilteredmethod
def commitctx(self, ctx, error=False):
    """Add a new revision to current repository.
    Revision information is passed via the context argument.

    ctx.files() should list all files involved in this commit, i.e.
    modified/added/removed files. On merge, it may be wider than the
    ctx.files() to be committed, since any file nodes derived directly
    from p1 or p2 are excluded from the committed ctx.files().

    When 'error' is true, IOErrors other than ENOENT while reading a
    file are fatal; otherwise a missing file is treated as removed.

    Returns the node of the new changeset.
    """

    tr = None
    p1, p2 = ctx.p1(), ctx.p2()
    user = ctx.user()

    lock = self.lock()
    try:
        tr = self.transaction("commit")
        trp = weakref.proxy(tr)

        if ctx.manifestnode():
            # reuse an existing manifest revision
            self.ui.debug('reusing known manifest\n')
            mn = ctx.manifestnode()
            files = ctx.files()
        elif ctx.files():
            m1ctx = p1.manifestctx()
            m2ctx = p2.manifestctx()
            mctx = m1ctx.copy()

            m = mctx.read()
            m1 = m1ctx.read()
            m2 = m2ctx.read()

            # check in files
            added = []
            changed = []
            removed = list(ctx.removed())
            linkrev = len(self)
            self.ui.note(_("committing files:\n"))
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    if fctx is None:
                        removed.append(f)
                    else:
                        added.append(f)
                        m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                trp, changed)
                        m.setflag(f, fctx.flags())
                except OSError as inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError as inst:
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise

            # update manifest
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m]
            for f in drop:
                del m[f]
            files = changed + removed
            md = None
            if not files:
                # if no "files" actually changed in terms of the changelog,
                # try hard to detect unmodified manifest entry so that the
                # exact same commit can be reproduced later on convert.
                md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
            if not files and md:
                self.ui.debug('not reusing manifest (no file change in '
                              'changelog, but manifest differs)\n')
            if files or md:
                self.ui.note(_("committing manifest\n"))
                mn = mctx.write(trp, linkrev,
                                p1.manifestnode(), p2.manifestnode(),
                                added, drop)
            else:
                # BUG FIX: debug message said "manifest form p1";
                # corrected to "manifest from p1".
                self.ui.debug('reusing manifest from p1 (listed files '
                              'actually unchanged)\n')
                mn = p1.manifestnode()
        else:
            self.ui.debug('reusing manifest from p1 (no file change)\n')
            mn = p1.manifestnode()
            files = []

        # update changelog
        self.ui.note(_("committing changelog\n"))
        self.changelog.delayupdate(tr)
        n = self.changelog.add(mn, files, ctx.description(),
                               trp, p1.node(), p2.node(),
                               user, ctx.date(), ctx.extra().copy())
        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        # set the new commit in the proper phase
        targetphase = subrepoutil.newcommitphase(self.ui, ctx)
        if targetphase:
            # retract boundary do not alter parent changeset.
            # if a parent have higher the resulting phase will
            # be compliant anyway
            #
            # if minimal phase was 0 we don't need to retract anything
            phases.registernew(self, tr, targetphase, [n])
        tr.close()
        return n
    finally:
        if tr:
            tr.release()
        lock.release()
2151
2152
@unfilteredmethod
def destroying(self):
    '''Inform the repository that nodes are about to be destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done before destroying history.

    This is mostly useful for saving state that is in memory and waiting
    to be flushed when the current lock is released. Because a call to
    destroyed is imminent, the repo will be invalidated causing those
    changes to stay in memory (waiting for the next unlock), or vanish
    completely.
    '''
    # When using the same lock to commit and strip, the phasecache is left
    # dirty after committing. Then when we strip, the repo is invalidated,
    # causing those changes to disappear.
    instancedict = vars(self)
    if '_phasecache' in instancedict:
        self._phasecache.write()
2169
2170
@unfilteredmethod
def destroyed(self):
    '''Inform the repository that nodes have been destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done after destroying history.
    '''
    # When one tries to:
    # 1) destroy nodes thus calling this method (e.g. strip)
    # 2) use phasecache somewhere (e.g. commit)
    #
    # then 2) will fail because the phasecache contains nodes that were
    # removed. We can either remove phasecache from the filecache,
    # causing it to reload next time it is accessed, or simply filter
    # the removed nodes now and write the updated cache.
    phasecache = self._phasecache
    phasecache.filterunknown(self)
    phasecache.write()

    # refresh all repository caches
    self.updatecaches()

    # Ensure the persistent tag cache is updated. Doing it now
    # means that the tag cache only has to worry about destroyed
    # heads immediately after a strip/rollback. That in turn
    # guarantees that "cachetip == currenttip" (comparing both rev
    # and node) always means no nodes have been added or destroyed.

    # XXX this is suboptimal when qrefresh'ing: we strip the current
    # head, refresh the tag cache, then immediately add a new head.
    # But I think doing it this way is necessary for the "instant
    # tag cache retrieval" case to work.
    self.invalidate()
2201
2202
def status(self, node1='.', node2=None, match=None,
           ignored=False, clean=False, unknown=False,
           listsubrepos=False):
    '''a convenience method that calls node1.status(node2)'''
    ctx1 = self[node1]
    return ctx1.status(node2, match, ignored, clean, unknown,
                       listsubrepos)
2208
2209
def addpostdsstatus(self, ps):
    """Add a callback to run within the wlock, at the point at which status
    fixups happen.

    On status completion, callback(wctx, status) will be called with the
    wlock held, unless the dirstate has changed from underneath or the wlock
    couldn't be grabbed.

    Callbacks should not capture and use a cached copy of the dirstate --
    it might change in the meanwhile. Instead, they should access the
    dirstate via wctx.repo().dirstate.

    This list is emptied out after each status run -- extensions should
    make sure it adds to this list each time dirstate.status is called.
    Extensions should also make sure they don't call this for statuses
    that don't involve the dirstate.
    """

    # The list is located here for uniqueness reasons -- it is actually
    # managed by the workingctx, but that isn't unique per-repo.
    hooks = self._postdsstatus
    hooks.append(ps)
2230
2231
def postdsstatus(self):
    """Used by workingctx to get the list of post-dirstate-status hooks."""
    hooks = self._postdsstatus
    return hooks
2234
2235
def clearpostdsstatus(self):
    """Used by workingctx to clear post-dirstate-status hooks."""
    # empty in place so existing references see the cleared list
    hooks = self._postdsstatus
    del hooks[:]
2238
2239
def heads(self, start=None):
    """Return changelog head nodes, newest first.

    With no 'start', every head is returned; otherwise only heads
    reachable from 'start'.
    """
    cl = self.changelog
    if start is None:
        # fast path: take the cached head revisions, newest first
        return [cl.node(rev) for rev in reversed(cl.headrevs())]

    found = cl.heads(start)
    # sort the output in rev descending order
    return sorted(found, key=cl.rev, reverse=True)
2248
2249
def branchheads(self, branch=None, start=None, closed=False):
    """Return a (possibly filtered) list of heads for the given branch.

    Heads are returned in topological order, from newest to oldest.
    If branch is None, use the dirstate branch.
    If start is not None, return only heads reachable from start.
    If closed is True, return heads that are marked as closed as well.
    """
    name = branch if branch is not None else self[None].branch()
    bmap = self.branchmap()
    if name not in bmap:
        return []
    # the branch cache yields heads ordered lowest to highest; flip them
    result = list(reversed(bmap.branchheads(name, closed=closed)))
    if start is not None:
        # keep only the heads that can be reached from start
        reachable = set(self.changelog.nodesbetween([start], result)[2])
        result = [node for node in result if node in reachable]
    return result
2269
2270
def branches(self, nodes):
    """Walk first parents of each node back to a merge or root.

    For every starting node, follow first parents until hitting a
    changeset that has a second parent or whose first parent is null,
    and record a (start, stop, p1, p2) tuple. Defaults to the changelog
    tip when ``nodes`` is empty.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    out = []
    for tip in nodes:
        cur = tip
        while True:
            ps = self.changelog.parents(cur)
            # stop at a merge (second parent set) or at a root
            if ps[1] != nullid or ps[0] == nullid:
                out.append((tip, cur, ps[0], ps[1]))
                break
            cur = ps[0]
    return out
2283
2284
def between(self, pairs):
    """Sample ancestors between each (top, bottom) pair.

    For every pair, walk first parents from ``top`` toward ``bottom``
    and collect the nodes at exponentially growing distances
    (1, 2, 4, ...), stopping at ``bottom`` or the null node. Returns
    one list of sampled nodes per pair.
    """
    result = []
    for top, bottom in pairs:
        sampled = []
        node, step, nextsample = top, 0, 1
        while node != bottom and node != nullid:
            if step == nextsample:
                sampled.append(node)
                nextsample *= 2
            node = self.changelog.parents(node)[0]
            step += 1
        result.append(sampled)
    return result
2302
2303
def checkpush(self, pushop):
    """Hook point for pre-push validation; does nothing by default.

    Extensions can override this function if additional checks have to
    be performed before pushing, or call it if they override the push
    command.
    """
2308
2309
@unfilteredpropertycache
def prepushoutgoinghooks(self):
    """Return a util.hooks instance called before pushing changesets.

    Each registered hook receives a pushop exposing repo, remote and
    outgoing.
    """
    hooks = util.hooks()
    return hooks
2315
2316
def pushkey(self, namespace, key, old, new):
    """Update ``key`` from ``old`` to ``new`` in ``namespace``.

    The ``prepushkey`` hook runs first (with any current transaction's
    hook arguments merged in); if it aborts, the error and optional
    hint are written to stderr and False is returned. Otherwise the
    pushkey backend performs the update, the ``pushkey`` hook is
    scheduled to fire after the lock is released, and the backend's
    result is returned.
    """
    try:
        tr = self.currenttransaction()
        args = {}
        if tr is not None:
            args.update(tr.hookargs)
        args = pycompat.strkwargs(args)
        args[r'namespace'] = namespace
        args[r'key'] = key
        args[r'old'] = old
        args[r'new'] = new
        self.hook('prepushkey', throw=True, **args)
    except error.HookAbort as exc:
        self.ui.write_err(_("pushkey-abort: %s\n") % exc)
        if exc.hint:
            self.ui.write_err(_("(%s)\n") % exc.hint)
        return False
    self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
    ret = pushkey.push(self, namespace, key, old, new)
    def runhook():
        self.hook('pushkey', namespace=namespace, key=key,
                  old=old, new=new, ret=ret)
    self._afterlock(runhook)
    return ret
2340
2341
def listkeys(self, namespace):
    """List the pushkey values in ``namespace``.

    Fires the ``prelistkeys`` hook (which may abort) before the lookup
    and the ``listkeys`` hook with the result afterwards.
    """
    self.hook('prelistkeys', throw=True, namespace=namespace)
    self.ui.debug('listing keys for "%s"\n' % namespace)
    result = pushkey.list(self, namespace)
    self.hook('listkeys', namespace=namespace, values=result)
    return result
2347
2348
def debugwireargs(self, one, two, three=None, four=None, five=None):
    """Echo the arguments back as one string.

    Used to test argument passing over the wire; optional arguments are
    coerced with pycompat.bytestr.
    """
    tail = (pycompat.bytestr(three),
            pycompat.bytestr(four),
            pycompat.bytestr(five))
    return "%s %s %s %s %s" % ((one, two) + tail)
2353
2354
def savecommitmessage(self, text):
    """Write ``text`` to .hg/last-message.txt and return its repo-relative path."""
    msgfile = self.vfs('last-message.txt', 'wb')
    try:
        msgfile.write(text)
    finally:
        msgfile.close()
    # strip the repo root (plus separator) from the absolute file name
    relpath = msgfile.name[len(self.root) + 1:]
    return self.pathto(relpath)
2361
2362
2362 # used to avoid circular references so destructors work
2363 # used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (vfs, src, dest) triple.

    ``files`` is snapshotted into plain tuples so the returned closure
    holds no reference back to the transaction (avoids circular
    references so destructors work).
    """
    pending = [tuple(entry) for entry in files]
    def renameall():
        for vfs, src, dest in pending:
            # if src and dest refer to a same file, vfs.rename is a no-op,
            # leaving both src and dest on disk. delete dest to make sure
            # the rename couldn't be such a no-op.
            vfs.tryunlink(dest)
            try:
                vfs.rename(src, dest)
            except OSError: # journal file does not yet exist
                pass
    return renameall
2376
2377
def undoname(fn):
    """Map a journal file path to the corresponding undo file path.

    The basename of ``fn`` must start with 'journal'; only that first
    occurrence is rewritten to 'undo'.
    """
    directory, basename = os.path.split(fn)
    assert basename.startswith('journal')
    renamed = basename.replace('journal', 'undo', 1)
    return os.path.join(directory, renamed)
2381
2382
def instance(ui, path, create, intents=None):
    """Open (or create) a localrepository at ``path``.

    ``path`` may be a local URL; it is converted to a filesystem path
    before the repository object is constructed.
    """
    localpath = util.urllocalpath(path)
    return localrepository(ui, localpath, create, intents=intents)
2385
2386
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
2388
2389
def newreporequirements(repo):
    """Determine the set of requirements for a new local repository.

    Extensions can wrap this function to specify custom requirements for
    new repositories.
    """
    ui = repo.ui
    requirements = {'revlogv1'}
    if ui.configbool('format', 'usestore'):
        requirements.add('store')
        # fncache and dotencode only apply on top of the store layout
        if ui.configbool('format', 'usefncache'):
            requirements.add('fncache')
            if ui.configbool('format', 'dotencode'):
                requirements.add('dotencode')

    compengine = ui.config('experimental', 'format.compression')
    if compengine not in util.compengines:
        raise error.Abort(_('compression engine %s defined by '
                            'experimental.format.compression not available') %
                          compengine,
                          hint=_('run "hg debuginstall" to list available '
                                 'compression engines'))

    # zlib is the historical default and doesn't need an explicit requirement.
    if compengine != 'zlib':
        requirements.add('exp-compression-%s' % compengine)

    if scmutil.gdinitconfig(ui):
        requirements.add('generaldelta')
    if ui.configbool('experimental', 'treemanifest'):
        requirements.add('treemanifest')
    # experimental config: format.sparse-revlog
    if ui.configbool('format', 'sparse-revlog'):
        requirements.add(SPARSEREVLOG_REQUIREMENT)

    revlogv2 = ui.config('experimental', 'revlogv2')
    if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
        requirements.remove('revlogv1')
        # generaldelta is implied by revlogv2.
        requirements.discard('generaldelta')
        requirements.add(REVLOGV2_REQUIREMENT)
    # experimental config: format.internal-phase
    # (use the local ``ui`` alias, consistent with every other config
    # read in this function, instead of going through repo.ui again)
    if ui.configbool('format', 'internal-phase'):
        requirements.add('internal-phase')

    return requirements
General Comments 0
You need to be logged in to leave comments. Login now