rewriting: add an option for rewrite commands to use the archived phase...
Boris Feld
r41961:64de5f44 default
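
The diff below (mercurial/configitems.py) registers one new configuration item, 'experimental.cleanup-as-archived', defaulting to False, so that history-rewriting commands can use the archived phase for cleaned-up changesets instead of stripping them. As a rough, hedged sketch only (none of this code is part of the commit; the helper name is invented, and it assumes the repository format supports the archived phase), the option would be read through the normal ui API:

    from mercurial import phases

    def _cleanupphase(repo):
        # configbool() falls back to the registered default (False) when the
        # option is not set in any hgrc
        if repo.ui.configbool('experimental', 'cleanup-as-archived'):
            return phases.archived  # hide rewritten changesets via the archived phase
        return None                 # keep the existing cleanup behaviour

A user would presumably opt in with "cleanup-as-archived = true" in the [experimental] section of an hgrc.
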
@@ -1,1452 +1,1455 @@
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
18 def loadconfigtable(ui, extname, configtable):
18 def loadconfigtable(ui, extname, configtable):
19 """update config item known to the ui with the extension ones"""
19 """update config item known to the ui with the extension ones"""
20 for section, items in sorted(configtable.items()):
20 for section, items in sorted(configtable.items()):
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
22 knownkeys = set(knownitems)
22 knownkeys = set(knownitems)
23 newkeys = set(items)
23 newkeys = set(items)
24 for key in sorted(knownkeys & newkeys):
24 for key in sorted(knownkeys & newkeys):
25 msg = "extension '%s' overwrite config item '%s.%s'"
25 msg = "extension '%s' overwrite config item '%s.%s'"
26 msg %= (extname, section, key)
26 msg %= (extname, section, key)
27 ui.develwarn(msg, config='warn-config')
27 ui.develwarn(msg, config='warn-config')
28
28
29 knownitems.update(items)
29 knownitems.update(items)
30
30
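# Illustrative sketch, not part of configitems.py: the extension-side shape
# that loadconfigtable() above consumes. An extension typically builds its own
# 'configtable' mapping through mercurial.registrar.configitem and declares
# its items there ('myext' and 'some-knob' are invented names):
#
#     from mercurial import registrar
#
#     configtable = {}
#     configitem = registrar.configitem(configtable)
#     configitem('myext', 'some-knob', default=False)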
31 class configitem(object):
31 class configitem(object):
32 """represent a known config item
32 """represent a known config item
33
33
34 :section: the official config section where to find this item,
34 :section: the official config section where to find this item,
35 :name: the official name within the section,
35 :name: the official name within the section,
36 :default: default value for this item,
36 :default: default value for this item,
37 :alias: optional list of tuples as alternatives,
37 :alias: optional list of tuples as alternatives,
38 :generic: this is a generic definition, match name using regular expression.
38 :generic: this is a generic definition, match name using regular expression.
39 """
39 """
40
40
41 def __init__(self, section, name, default=None, alias=(),
41 def __init__(self, section, name, default=None, alias=(),
42 generic=False, priority=0):
42 generic=False, priority=0):
43 self.section = section
43 self.section = section
44 self.name = name
44 self.name = name
45 self.default = default
45 self.default = default
46 self.alias = list(alias)
46 self.alias = list(alias)
47 self.generic = generic
47 self.generic = generic
48 self.priority = priority
48 self.priority = priority
49 self._re = None
49 self._re = None
50 if generic:
50 if generic:
51 self._re = re.compile(self.name)
51 self._re = re.compile(self.name)
52
52
53 class itemregister(dict):
53 class itemregister(dict):
54 """A specialized dictionary that can handle wild-card selection"""
54 """A specialized dictionary that can handle wild-card selection"""
55
55
56 def __init__(self):
56 def __init__(self):
57 super(itemregister, self).__init__()
57 super(itemregister, self).__init__()
58 self._generics = set()
58 self._generics = set()
59
59
60 def update(self, other):
60 def update(self, other):
61 super(itemregister, self).update(other)
61 super(itemregister, self).update(other)
62 self._generics.update(other._generics)
62 self._generics.update(other._generics)
63
63
64 def __setitem__(self, key, item):
64 def __setitem__(self, key, item):
65 super(itemregister, self).__setitem__(key, item)
65 super(itemregister, self).__setitem__(key, item)
66 if item.generic:
66 if item.generic:
67 self._generics.add(item)
67 self._generics.add(item)
68
68
69 def get(self, key):
69 def get(self, key):
70 baseitem = super(itemregister, self).get(key)
70 baseitem = super(itemregister, self).get(key)
71 if baseitem is not None and not baseitem.generic:
71 if baseitem is not None and not baseitem.generic:
72 return baseitem
72 return baseitem
73
73
74 # search for a matching generic item
74 # search for a matching generic item
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
76 for item in generics:
76 for item in generics:
77 # we use 'match' instead of 'search' to make the matching simpler
77 # we use 'match' instead of 'search' to make the matching simpler
78 # for people unfamiliar with regular expression. Having the match
78 # for people unfamiliar with regular expression. Having the match
79 # rooted to the start of the string will produce less surprising
79 # rooted to the start of the string will produce less surprising
80 # result for user writing simple regex for sub-attribute.
80 # result for user writing simple regex for sub-attribute.
81 #
81 #
82 # For example using "color\..*" match produces an unsurprising
82 # For example using "color\..*" match produces an unsurprising
83 # result, while using search could suddenly match apparently
83 # result, while using search could suddenly match apparently
84 # unrelated configuration that happens to contains "color."
84 # unrelated configuration that happens to contains "color."
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
86 # some match to avoid the need to prefix most pattern with "^".
86 # some match to avoid the need to prefix most pattern with "^".
87 # The "^" seems more error prone.
87 # The "^" seems more error prone.
88 if item._re.match(key):
88 if item._re.match(key):
89 return item
89 return item
90
90
91 return None
91 return None
92
92
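# Illustrative check, not part of configitems.py ('re' is already imported at
# the top of this module): the comment above explains why get() uses
# re.match() rather than re.search(). match() anchors the generic pattern at
# the start of the key, so 'color\..*' covers the 'color.' namespace without
# firing on a key that merely contains "color." in the middle (the key names
# below are arbitrary examples).
assert re.match(r'color\..*', 'color.mode')
assert not re.match(r'color\..*', 'something.color.mode')
assert re.search(r'color\..*', 'something.color.mode')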
93 coreitems = {}
93 coreitems = {}
94
94
95 def _register(configtable, *args, **kwargs):
95 def _register(configtable, *args, **kwargs):
96 item = configitem(*args, **kwargs)
96 item = configitem(*args, **kwargs)
97 section = configtable.setdefault(item.section, itemregister())
97 section = configtable.setdefault(item.section, itemregister())
98 if item.name in section:
98 if item.name in section:
99 msg = "duplicated config item registration for '%s.%s'"
99 msg = "duplicated config item registration for '%s.%s'"
100 raise error.ProgrammingError(msg % (item.section, item.name))
100 raise error.ProgrammingError(msg % (item.section, item.name))
101 section[item.name] = item
101 section[item.name] = item
102
102
103 # special value for case where the default is derived from other values
103 # special value for case where the default is derived from other values
104 dynamicdefault = object()
104 dynamicdefault = object()
105
105
106 # Registering actual config items
106 # Registering actual config items
107
107
108 def getitemregister(configtable):
108 def getitemregister(configtable):
109 f = functools.partial(_register, configtable)
109 f = functools.partial(_register, configtable)
110 # export pseudo enum as configitem.*
110 # export pseudo enum as configitem.*
111 f.dynamicdefault = dynamicdefault
111 f.dynamicdefault = dynamicdefault
112 return f
112 return f
113
113
114 coreconfigitem = getitemregister(coreitems)
114 coreconfigitem = getitemregister(coreitems)
115
115
116 def _registerdiffopts(section, configprefix=''):
116 def _registerdiffopts(section, configprefix=''):
117 coreconfigitem(section, configprefix + 'nodates',
117 coreconfigitem(section, configprefix + 'nodates',
118 default=False,
118 default=False,
119 )
119 )
120 coreconfigitem(section, configprefix + 'showfunc',
120 coreconfigitem(section, configprefix + 'showfunc',
121 default=False,
121 default=False,
122 )
122 )
123 coreconfigitem(section, configprefix + 'unified',
123 coreconfigitem(section, configprefix + 'unified',
124 default=None,
124 default=None,
125 )
125 )
126 coreconfigitem(section, configprefix + 'git',
126 coreconfigitem(section, configprefix + 'git',
127 default=False,
127 default=False,
128 )
128 )
129 coreconfigitem(section, configprefix + 'ignorews',
129 coreconfigitem(section, configprefix + 'ignorews',
130 default=False,
130 default=False,
131 )
131 )
132 coreconfigitem(section, configprefix + 'ignorewsamount',
132 coreconfigitem(section, configprefix + 'ignorewsamount',
133 default=False,
133 default=False,
134 )
134 )
135 coreconfigitem(section, configprefix + 'ignoreblanklines',
135 coreconfigitem(section, configprefix + 'ignoreblanklines',
136 default=False,
136 default=False,
137 )
137 )
138 coreconfigitem(section, configprefix + 'ignorewseol',
138 coreconfigitem(section, configprefix + 'ignorewseol',
139 default=False,
139 default=False,
140 )
140 )
141 coreconfigitem(section, configprefix + 'nobinary',
141 coreconfigitem(section, configprefix + 'nobinary',
142 default=False,
142 default=False,
143 )
143 )
144 coreconfigitem(section, configprefix + 'noprefix',
144 coreconfigitem(section, configprefix + 'noprefix',
145 default=False,
145 default=False,
146 )
146 )
147 coreconfigitem(section, configprefix + 'word-diff',
147 coreconfigitem(section, configprefix + 'word-diff',
148 default=False,
148 default=False,
149 )
149 )
150
150
151 coreconfigitem('alias', '.*',
151 coreconfigitem('alias', '.*',
152 default=dynamicdefault,
152 default=dynamicdefault,
153 generic=True,
153 generic=True,
154 )
154 )
155 coreconfigitem('auth', 'cookiefile',
155 coreconfigitem('auth', 'cookiefile',
156 default=None,
156 default=None,
157 )
157 )
158 _registerdiffopts(section='annotate')
158 _registerdiffopts(section='annotate')
159 # bookmarks.pushing: internal hack for discovery
159 # bookmarks.pushing: internal hack for discovery
160 coreconfigitem('bookmarks', 'pushing',
160 coreconfigitem('bookmarks', 'pushing',
161 default=list,
161 default=list,
162 )
162 )
163 # bundle.mainreporoot: internal hack for bundlerepo
163 # bundle.mainreporoot: internal hack for bundlerepo
164 coreconfigitem('bundle', 'mainreporoot',
164 coreconfigitem('bundle', 'mainreporoot',
165 default='',
165 default='',
166 )
166 )
167 coreconfigitem('censor', 'policy',
167 coreconfigitem('censor', 'policy',
168 default='abort',
168 default='abort',
169 )
169 )
170 coreconfigitem('chgserver', 'idletimeout',
170 coreconfigitem('chgserver', 'idletimeout',
171 default=3600,
171 default=3600,
172 )
172 )
173 coreconfigitem('chgserver', 'skiphash',
173 coreconfigitem('chgserver', 'skiphash',
174 default=False,
174 default=False,
175 )
175 )
176 coreconfigitem('cmdserver', 'log',
176 coreconfigitem('cmdserver', 'log',
177 default=None,
177 default=None,
178 )
178 )
179 coreconfigitem('cmdserver', 'max-log-files',
179 coreconfigitem('cmdserver', 'max-log-files',
180 default=7,
180 default=7,
181 )
181 )
182 coreconfigitem('cmdserver', 'max-log-size',
182 coreconfigitem('cmdserver', 'max-log-size',
183 default='1 MB',
183 default='1 MB',
184 )
184 )
185 coreconfigitem('cmdserver', 'max-repo-cache',
185 coreconfigitem('cmdserver', 'max-repo-cache',
186 default=0,
186 default=0,
187 )
187 )
188 coreconfigitem('cmdserver', 'message-encodings',
188 coreconfigitem('cmdserver', 'message-encodings',
189 default=list,
189 default=list,
190 )
190 )
191 coreconfigitem('cmdserver', 'track-log',
191 coreconfigitem('cmdserver', 'track-log',
192 default=lambda: ['chgserver', 'cmdserver', 'repocache'],
192 default=lambda: ['chgserver', 'cmdserver', 'repocache'],
193 )
193 )
194 coreconfigitem('color', '.*',
194 coreconfigitem('color', '.*',
195 default=None,
195 default=None,
196 generic=True,
196 generic=True,
197 )
197 )
198 coreconfigitem('color', 'mode',
198 coreconfigitem('color', 'mode',
199 default='auto',
199 default='auto',
200 )
200 )
201 coreconfigitem('color', 'pagermode',
201 coreconfigitem('color', 'pagermode',
202 default=dynamicdefault,
202 default=dynamicdefault,
203 )
203 )
204 _registerdiffopts(section='commands', configprefix='commit.interactive.')
204 _registerdiffopts(section='commands', configprefix='commit.interactive.')
205 coreconfigitem('commands', 'grep.all-files',
205 coreconfigitem('commands', 'grep.all-files',
206 default=False,
206 default=False,
207 )
207 )
208 coreconfigitem('commands', 'resolve.confirm',
208 coreconfigitem('commands', 'resolve.confirm',
209 default=False,
209 default=False,
210 )
210 )
211 coreconfigitem('commands', 'resolve.explicit-re-merge',
211 coreconfigitem('commands', 'resolve.explicit-re-merge',
212 default=False,
212 default=False,
213 )
213 )
214 coreconfigitem('commands', 'resolve.mark-check',
214 coreconfigitem('commands', 'resolve.mark-check',
215 default='none',
215 default='none',
216 )
216 )
217 _registerdiffopts(section='commands', configprefix='revert.interactive.')
217 _registerdiffopts(section='commands', configprefix='revert.interactive.')
218 coreconfigitem('commands', 'show.aliasprefix',
218 coreconfigitem('commands', 'show.aliasprefix',
219 default=list,
219 default=list,
220 )
220 )
221 coreconfigitem('commands', 'status.relative',
221 coreconfigitem('commands', 'status.relative',
222 default=False,
222 default=False,
223 )
223 )
224 coreconfigitem('commands', 'status.skipstates',
224 coreconfigitem('commands', 'status.skipstates',
225 default=[],
225 default=[],
226 )
226 )
227 coreconfigitem('commands', 'status.terse',
227 coreconfigitem('commands', 'status.terse',
228 default='',
228 default='',
229 )
229 )
230 coreconfigitem('commands', 'status.verbose',
230 coreconfigitem('commands', 'status.verbose',
231 default=False,
231 default=False,
232 )
232 )
233 coreconfigitem('commands', 'update.check',
233 coreconfigitem('commands', 'update.check',
234 default=None,
234 default=None,
235 )
235 )
236 coreconfigitem('commands', 'update.requiredest',
236 coreconfigitem('commands', 'update.requiredest',
237 default=False,
237 default=False,
238 )
238 )
239 coreconfigitem('committemplate', '.*',
239 coreconfigitem('committemplate', '.*',
240 default=None,
240 default=None,
241 generic=True,
241 generic=True,
242 )
242 )
243 coreconfigitem('convert', 'bzr.saverev',
243 coreconfigitem('convert', 'bzr.saverev',
244 default=True,
244 default=True,
245 )
245 )
246 coreconfigitem('convert', 'cvsps.cache',
246 coreconfigitem('convert', 'cvsps.cache',
247 default=True,
247 default=True,
248 )
248 )
249 coreconfigitem('convert', 'cvsps.fuzz',
249 coreconfigitem('convert', 'cvsps.fuzz',
250 default=60,
250 default=60,
251 )
251 )
252 coreconfigitem('convert', 'cvsps.logencoding',
252 coreconfigitem('convert', 'cvsps.logencoding',
253 default=None,
253 default=None,
254 )
254 )
255 coreconfigitem('convert', 'cvsps.mergefrom',
255 coreconfigitem('convert', 'cvsps.mergefrom',
256 default=None,
256 default=None,
257 )
257 )
258 coreconfigitem('convert', 'cvsps.mergeto',
258 coreconfigitem('convert', 'cvsps.mergeto',
259 default=None,
259 default=None,
260 )
260 )
261 coreconfigitem('convert', 'git.committeractions',
261 coreconfigitem('convert', 'git.committeractions',
262 default=lambda: ['messagedifferent'],
262 default=lambda: ['messagedifferent'],
263 )
263 )
264 coreconfigitem('convert', 'git.extrakeys',
264 coreconfigitem('convert', 'git.extrakeys',
265 default=list,
265 default=list,
266 )
266 )
267 coreconfigitem('convert', 'git.findcopiesharder',
267 coreconfigitem('convert', 'git.findcopiesharder',
268 default=False,
268 default=False,
269 )
269 )
270 coreconfigitem('convert', 'git.remoteprefix',
270 coreconfigitem('convert', 'git.remoteprefix',
271 default='remote',
271 default='remote',
272 )
272 )
273 coreconfigitem('convert', 'git.renamelimit',
273 coreconfigitem('convert', 'git.renamelimit',
274 default=400,
274 default=400,
275 )
275 )
276 coreconfigitem('convert', 'git.saverev',
276 coreconfigitem('convert', 'git.saverev',
277 default=True,
277 default=True,
278 )
278 )
279 coreconfigitem('convert', 'git.similarity',
279 coreconfigitem('convert', 'git.similarity',
280 default=50,
280 default=50,
281 )
281 )
282 coreconfigitem('convert', 'git.skipsubmodules',
282 coreconfigitem('convert', 'git.skipsubmodules',
283 default=False,
283 default=False,
284 )
284 )
285 coreconfigitem('convert', 'hg.clonebranches',
285 coreconfigitem('convert', 'hg.clonebranches',
286 default=False,
286 default=False,
287 )
287 )
288 coreconfigitem('convert', 'hg.ignoreerrors',
288 coreconfigitem('convert', 'hg.ignoreerrors',
289 default=False,
289 default=False,
290 )
290 )
291 coreconfigitem('convert', 'hg.revs',
291 coreconfigitem('convert', 'hg.revs',
292 default=None,
292 default=None,
293 )
293 )
294 coreconfigitem('convert', 'hg.saverev',
294 coreconfigitem('convert', 'hg.saverev',
295 default=False,
295 default=False,
296 )
296 )
297 coreconfigitem('convert', 'hg.sourcename',
297 coreconfigitem('convert', 'hg.sourcename',
298 default=None,
298 default=None,
299 )
299 )
300 coreconfigitem('convert', 'hg.startrev',
300 coreconfigitem('convert', 'hg.startrev',
301 default=None,
301 default=None,
302 )
302 )
303 coreconfigitem('convert', 'hg.tagsbranch',
303 coreconfigitem('convert', 'hg.tagsbranch',
304 default='default',
304 default='default',
305 )
305 )
306 coreconfigitem('convert', 'hg.usebranchnames',
306 coreconfigitem('convert', 'hg.usebranchnames',
307 default=True,
307 default=True,
308 )
308 )
309 coreconfigitem('convert', 'ignoreancestorcheck',
309 coreconfigitem('convert', 'ignoreancestorcheck',
310 default=False,
310 default=False,
311 )
311 )
312 coreconfigitem('convert', 'localtimezone',
312 coreconfigitem('convert', 'localtimezone',
313 default=False,
313 default=False,
314 )
314 )
315 coreconfigitem('convert', 'p4.encoding',
315 coreconfigitem('convert', 'p4.encoding',
316 default=dynamicdefault,
316 default=dynamicdefault,
317 )
317 )
318 coreconfigitem('convert', 'p4.startrev',
318 coreconfigitem('convert', 'p4.startrev',
319 default=0,
319 default=0,
320 )
320 )
321 coreconfigitem('convert', 'skiptags',
321 coreconfigitem('convert', 'skiptags',
322 default=False,
322 default=False,
323 )
323 )
324 coreconfigitem('convert', 'svn.debugsvnlog',
324 coreconfigitem('convert', 'svn.debugsvnlog',
325 default=True,
325 default=True,
326 )
326 )
327 coreconfigitem('convert', 'svn.trunk',
327 coreconfigitem('convert', 'svn.trunk',
328 default=None,
328 default=None,
329 )
329 )
330 coreconfigitem('convert', 'svn.tags',
330 coreconfigitem('convert', 'svn.tags',
331 default=None,
331 default=None,
332 )
332 )
333 coreconfigitem('convert', 'svn.branches',
333 coreconfigitem('convert', 'svn.branches',
334 default=None,
334 default=None,
335 )
335 )
336 coreconfigitem('convert', 'svn.startrev',
336 coreconfigitem('convert', 'svn.startrev',
337 default=0,
337 default=0,
338 )
338 )
339 coreconfigitem('debug', 'dirstate.delaywrite',
339 coreconfigitem('debug', 'dirstate.delaywrite',
340 default=0,
340 default=0,
341 )
341 )
342 coreconfigitem('defaults', '.*',
342 coreconfigitem('defaults', '.*',
343 default=None,
343 default=None,
344 generic=True,
344 generic=True,
345 )
345 )
346 coreconfigitem('devel', 'all-warnings',
346 coreconfigitem('devel', 'all-warnings',
347 default=False,
347 default=False,
348 )
348 )
349 coreconfigitem('devel', 'bundle2.debug',
349 coreconfigitem('devel', 'bundle2.debug',
350 default=False,
350 default=False,
351 )
351 )
352 coreconfigitem('devel', 'bundle.delta',
352 coreconfigitem('devel', 'bundle.delta',
353 default='',
353 default='',
354 )
354 )
355 coreconfigitem('devel', 'cache-vfs',
355 coreconfigitem('devel', 'cache-vfs',
356 default=None,
356 default=None,
357 )
357 )
358 coreconfigitem('devel', 'check-locks',
358 coreconfigitem('devel', 'check-locks',
359 default=False,
359 default=False,
360 )
360 )
361 coreconfigitem('devel', 'check-relroot',
361 coreconfigitem('devel', 'check-relroot',
362 default=False,
362 default=False,
363 )
363 )
364 coreconfigitem('devel', 'default-date',
364 coreconfigitem('devel', 'default-date',
365 default=None,
365 default=None,
366 )
366 )
367 coreconfigitem('devel', 'deprec-warn',
367 coreconfigitem('devel', 'deprec-warn',
368 default=False,
368 default=False,
369 )
369 )
370 coreconfigitem('devel', 'disableloaddefaultcerts',
370 coreconfigitem('devel', 'disableloaddefaultcerts',
371 default=False,
371 default=False,
372 )
372 )
373 coreconfigitem('devel', 'warn-empty-changegroup',
373 coreconfigitem('devel', 'warn-empty-changegroup',
374 default=False,
374 default=False,
375 )
375 )
376 coreconfigitem('devel', 'legacy.exchange',
376 coreconfigitem('devel', 'legacy.exchange',
377 default=list,
377 default=list,
378 )
378 )
379 coreconfigitem('devel', 'servercafile',
379 coreconfigitem('devel', 'servercafile',
380 default='',
380 default='',
381 )
381 )
382 coreconfigitem('devel', 'serverexactprotocol',
382 coreconfigitem('devel', 'serverexactprotocol',
383 default='',
383 default='',
384 )
384 )
385 coreconfigitem('devel', 'serverrequirecert',
385 coreconfigitem('devel', 'serverrequirecert',
386 default=False,
386 default=False,
387 )
387 )
388 coreconfigitem('devel', 'strip-obsmarkers',
388 coreconfigitem('devel', 'strip-obsmarkers',
389 default=True,
389 default=True,
390 )
390 )
391 coreconfigitem('devel', 'warn-config',
391 coreconfigitem('devel', 'warn-config',
392 default=None,
392 default=None,
393 )
393 )
394 coreconfigitem('devel', 'warn-config-default',
394 coreconfigitem('devel', 'warn-config-default',
395 default=None,
395 default=None,
396 )
396 )
397 coreconfigitem('devel', 'user.obsmarker',
397 coreconfigitem('devel', 'user.obsmarker',
398 default=None,
398 default=None,
399 )
399 )
400 coreconfigitem('devel', 'warn-config-unknown',
400 coreconfigitem('devel', 'warn-config-unknown',
401 default=None,
401 default=None,
402 )
402 )
403 coreconfigitem('devel', 'debug.copies',
403 coreconfigitem('devel', 'debug.copies',
404 default=False,
404 default=False,
405 )
405 )
406 coreconfigitem('devel', 'debug.extensions',
406 coreconfigitem('devel', 'debug.extensions',
407 default=False,
407 default=False,
408 )
408 )
409 coreconfigitem('devel', 'debug.peer-request',
409 coreconfigitem('devel', 'debug.peer-request',
410 default=False,
410 default=False,
411 )
411 )
412 _registerdiffopts(section='diff')
412 _registerdiffopts(section='diff')
413 coreconfigitem('email', 'bcc',
413 coreconfigitem('email', 'bcc',
414 default=None,
414 default=None,
415 )
415 )
416 coreconfigitem('email', 'cc',
416 coreconfigitem('email', 'cc',
417 default=None,
417 default=None,
418 )
418 )
419 coreconfigitem('email', 'charsets',
419 coreconfigitem('email', 'charsets',
420 default=list,
420 default=list,
421 )
421 )
422 coreconfigitem('email', 'from',
422 coreconfigitem('email', 'from',
423 default=None,
423 default=None,
424 )
424 )
425 coreconfigitem('email', 'method',
425 coreconfigitem('email', 'method',
426 default='smtp',
426 default='smtp',
427 )
427 )
428 coreconfigitem('email', 'reply-to',
428 coreconfigitem('email', 'reply-to',
429 default=None,
429 default=None,
430 )
430 )
431 coreconfigitem('email', 'to',
431 coreconfigitem('email', 'to',
432 default=None,
432 default=None,
433 )
433 )
434 coreconfigitem('experimental', 'archivemetatemplate',
434 coreconfigitem('experimental', 'archivemetatemplate',
435 default=dynamicdefault,
435 default=dynamicdefault,
436 )
436 )
437 coreconfigitem('experimental', 'auto-publish',
437 coreconfigitem('experimental', 'auto-publish',
438 default='publish',
438 default='publish',
439 )
439 )
440 coreconfigitem('experimental', 'bundle-phases',
440 coreconfigitem('experimental', 'bundle-phases',
441 default=False,
441 default=False,
442 )
442 )
443 coreconfigitem('experimental', 'bundle2-advertise',
443 coreconfigitem('experimental', 'bundle2-advertise',
444 default=True,
444 default=True,
445 )
445 )
446 coreconfigitem('experimental', 'bundle2-output-capture',
446 coreconfigitem('experimental', 'bundle2-output-capture',
447 default=False,
447 default=False,
448 )
448 )
449 coreconfigitem('experimental', 'bundle2.pushback',
449 coreconfigitem('experimental', 'bundle2.pushback',
450 default=False,
450 default=False,
451 )
451 )
452 coreconfigitem('experimental', 'bundle2lazylocking',
452 coreconfigitem('experimental', 'bundle2lazylocking',
453 default=False,
453 default=False,
454 )
454 )
455 coreconfigitem('experimental', 'bundlecomplevel',
455 coreconfigitem('experimental', 'bundlecomplevel',
456 default=None,
456 default=None,
457 )
457 )
458 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
458 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
459 default=None,
459 default=None,
460 )
460 )
461 coreconfigitem('experimental', 'bundlecomplevel.gzip',
461 coreconfigitem('experimental', 'bundlecomplevel.gzip',
462 default=None,
462 default=None,
463 )
463 )
464 coreconfigitem('experimental', 'bundlecomplevel.none',
464 coreconfigitem('experimental', 'bundlecomplevel.none',
465 default=None,
465 default=None,
466 )
466 )
467 coreconfigitem('experimental', 'bundlecomplevel.zstd',
467 coreconfigitem('experimental', 'bundlecomplevel.zstd',
468 default=None,
468 default=None,
469 )
469 )
470 coreconfigitem('experimental', 'changegroup3',
470 coreconfigitem('experimental', 'changegroup3',
471 default=False,
471 default=False,
472 )
472 )
473 coreconfigitem('experimental', 'cleanup-as-archived',
474 default=False,
475 )
473 coreconfigitem('experimental', 'clientcompressionengines',
476 coreconfigitem('experimental', 'clientcompressionengines',
474 default=list,
477 default=list,
475 )
478 )
476 coreconfigitem('experimental', 'copytrace',
479 coreconfigitem('experimental', 'copytrace',
477 default='on',
480 default='on',
478 )
481 )
479 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
482 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
480 default=100,
483 default=100,
481 )
484 )
482 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
485 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
483 default=100,
486 default=100,
484 )
487 )
485 coreconfigitem('experimental', 'copies.read-from',
488 coreconfigitem('experimental', 'copies.read-from',
486 default="filelog-only",
489 default="filelog-only",
487 )
490 )
488 coreconfigitem('experimental', 'crecordtest',
491 coreconfigitem('experimental', 'crecordtest',
489 default=None,
492 default=None,
490 )
493 )
491 coreconfigitem('experimental', 'directaccess',
494 coreconfigitem('experimental', 'directaccess',
492 default=False,
495 default=False,
493 )
496 )
494 coreconfigitem('experimental', 'directaccess.revnums',
497 coreconfigitem('experimental', 'directaccess.revnums',
495 default=False,
498 default=False,
496 )
499 )
497 coreconfigitem('experimental', 'editortmpinhg',
500 coreconfigitem('experimental', 'editortmpinhg',
498 default=False,
501 default=False,
499 )
502 )
500 coreconfigitem('experimental', 'evolution',
503 coreconfigitem('experimental', 'evolution',
501 default=list,
504 default=list,
502 )
505 )
503 coreconfigitem('experimental', 'evolution.allowdivergence',
506 coreconfigitem('experimental', 'evolution.allowdivergence',
504 default=False,
507 default=False,
505 alias=[('experimental', 'allowdivergence')]
508 alias=[('experimental', 'allowdivergence')]
506 )
509 )
507 coreconfigitem('experimental', 'evolution.allowunstable',
510 coreconfigitem('experimental', 'evolution.allowunstable',
508 default=None,
511 default=None,
509 )
512 )
510 coreconfigitem('experimental', 'evolution.createmarkers',
513 coreconfigitem('experimental', 'evolution.createmarkers',
511 default=None,
514 default=None,
512 )
515 )
513 coreconfigitem('experimental', 'evolution.effect-flags',
516 coreconfigitem('experimental', 'evolution.effect-flags',
514 default=True,
517 default=True,
515 alias=[('experimental', 'effect-flags')]
518 alias=[('experimental', 'effect-flags')]
516 )
519 )
517 coreconfigitem('experimental', 'evolution.exchange',
520 coreconfigitem('experimental', 'evolution.exchange',
518 default=None,
521 default=None,
519 )
522 )
520 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
523 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
521 default=False,
524 default=False,
522 )
525 )
523 coreconfigitem('experimental', 'evolution.report-instabilities',
526 coreconfigitem('experimental', 'evolution.report-instabilities',
524 default=True,
527 default=True,
525 )
528 )
526 coreconfigitem('experimental', 'evolution.track-operation',
529 coreconfigitem('experimental', 'evolution.track-operation',
527 default=True,
530 default=True,
528 )
531 )
529 coreconfigitem('experimental', 'maxdeltachainspan',
532 coreconfigitem('experimental', 'maxdeltachainspan',
530 default=-1,
533 default=-1,
531 )
534 )
532 coreconfigitem('experimental', 'mergetempdirprefix',
535 coreconfigitem('experimental', 'mergetempdirprefix',
533 default=None,
536 default=None,
534 )
537 )
535 coreconfigitem('experimental', 'mmapindexthreshold',
538 coreconfigitem('experimental', 'mmapindexthreshold',
536 default=None,
539 default=None,
537 )
540 )
538 coreconfigitem('experimental', 'narrow',
541 coreconfigitem('experimental', 'narrow',
539 default=False,
542 default=False,
540 )
543 )
541 coreconfigitem('experimental', 'nonnormalparanoidcheck',
544 coreconfigitem('experimental', 'nonnormalparanoidcheck',
542 default=False,
545 default=False,
543 )
546 )
544 coreconfigitem('experimental', 'exportableenviron',
547 coreconfigitem('experimental', 'exportableenviron',
545 default=list,
548 default=list,
546 )
549 )
547 coreconfigitem('experimental', 'extendedheader.index',
550 coreconfigitem('experimental', 'extendedheader.index',
548 default=None,
551 default=None,
549 )
552 )
550 coreconfigitem('experimental', 'extendedheader.similarity',
553 coreconfigitem('experimental', 'extendedheader.similarity',
551 default=False,
554 default=False,
552 )
555 )
553 coreconfigitem('experimental', 'format.compression',
556 coreconfigitem('experimental', 'format.compression',
554 default='zlib',
557 default='zlib',
555 )
558 )
556 coreconfigitem('experimental', 'graphshorten',
559 coreconfigitem('experimental', 'graphshorten',
557 default=False,
560 default=False,
558 )
561 )
559 coreconfigitem('experimental', 'graphstyle.parent',
562 coreconfigitem('experimental', 'graphstyle.parent',
560 default=dynamicdefault,
563 default=dynamicdefault,
561 )
564 )
562 coreconfigitem('experimental', 'graphstyle.missing',
565 coreconfigitem('experimental', 'graphstyle.missing',
563 default=dynamicdefault,
566 default=dynamicdefault,
564 )
567 )
565 coreconfigitem('experimental', 'graphstyle.grandparent',
568 coreconfigitem('experimental', 'graphstyle.grandparent',
566 default=dynamicdefault,
569 default=dynamicdefault,
567 )
570 )
568 coreconfigitem('experimental', 'hook-track-tags',
571 coreconfigitem('experimental', 'hook-track-tags',
569 default=False,
572 default=False,
570 )
573 )
571 coreconfigitem('experimental', 'httppeer.advertise-v2',
574 coreconfigitem('experimental', 'httppeer.advertise-v2',
572 default=False,
575 default=False,
573 )
576 )
574 coreconfigitem('experimental', 'httppeer.v2-encoder-order',
577 coreconfigitem('experimental', 'httppeer.v2-encoder-order',
575 default=None,
578 default=None,
576 )
579 )
577 coreconfigitem('experimental', 'httppostargs',
580 coreconfigitem('experimental', 'httppostargs',
578 default=False,
581 default=False,
579 )
582 )
580 coreconfigitem('experimental', 'mergedriver',
583 coreconfigitem('experimental', 'mergedriver',
581 default=None,
584 default=None,
582 )
585 )
583 coreconfigitem('experimental', 'nointerrupt', default=False)
586 coreconfigitem('experimental', 'nointerrupt', default=False)
584 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
587 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
585
588
586 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
589 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
587 default=False,
590 default=False,
588 )
591 )
589 coreconfigitem('experimental', 'remotenames',
592 coreconfigitem('experimental', 'remotenames',
590 default=False,
593 default=False,
591 )
594 )
592 coreconfigitem('experimental', 'removeemptydirs',
595 coreconfigitem('experimental', 'removeemptydirs',
593 default=True,
596 default=True,
594 )
597 )
595 coreconfigitem('experimental', 'revisions.prefixhexnode',
598 coreconfigitem('experimental', 'revisions.prefixhexnode',
596 default=False,
599 default=False,
597 )
600 )
598 coreconfigitem('experimental', 'revlogv2',
601 coreconfigitem('experimental', 'revlogv2',
599 default=None,
602 default=None,
600 )
603 )
601 coreconfigitem('experimental', 'revisions.disambiguatewithin',
604 coreconfigitem('experimental', 'revisions.disambiguatewithin',
602 default=None,
605 default=None,
603 )
606 )
604 coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
607 coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
605 default=50000,
608 default=50000,
606 )
609 )
607 coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
610 coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
608 default=100000,
611 default=100000,
609 )
612 )
610 coreconfigitem('experimental', 'server.stream-narrow-clones',
613 coreconfigitem('experimental', 'server.stream-narrow-clones',
611 default=False,
614 default=False,
612 )
615 )
613 coreconfigitem('experimental', 'single-head-per-branch',
616 coreconfigitem('experimental', 'single-head-per-branch',
614 default=False,
617 default=False,
615 )
618 )
616 coreconfigitem('experimental', 'sshserver.support-v2',
619 coreconfigitem('experimental', 'sshserver.support-v2',
617 default=False,
620 default=False,
618 )
621 )
619 coreconfigitem('experimental', 'sparse-read',
622 coreconfigitem('experimental', 'sparse-read',
620 default=False,
623 default=False,
621 )
624 )
622 coreconfigitem('experimental', 'sparse-read.density-threshold',
625 coreconfigitem('experimental', 'sparse-read.density-threshold',
623 default=0.50,
626 default=0.50,
624 )
627 )
625 coreconfigitem('experimental', 'sparse-read.min-gap-size',
628 coreconfigitem('experimental', 'sparse-read.min-gap-size',
626 default='65K',
629 default='65K',
627 )
630 )
628 coreconfigitem('experimental', 'treemanifest',
631 coreconfigitem('experimental', 'treemanifest',
629 default=False,
632 default=False,
630 )
633 )
631 coreconfigitem('experimental', 'update.atomic-file',
634 coreconfigitem('experimental', 'update.atomic-file',
632 default=False,
635 default=False,
633 )
636 )
634 coreconfigitem('experimental', 'sshpeer.advertise-v2',
637 coreconfigitem('experimental', 'sshpeer.advertise-v2',
635 default=False,
638 default=False,
636 )
639 )
637 coreconfigitem('experimental', 'web.apiserver',
640 coreconfigitem('experimental', 'web.apiserver',
638 default=False,
641 default=False,
639 )
642 )
640 coreconfigitem('experimental', 'web.api.http-v2',
643 coreconfigitem('experimental', 'web.api.http-v2',
641 default=False,
644 default=False,
642 )
645 )
643 coreconfigitem('experimental', 'web.api.debugreflect',
646 coreconfigitem('experimental', 'web.api.debugreflect',
644 default=False,
647 default=False,
645 )
648 )
646 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
649 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
647 default=False,
650 default=False,
648 )
651 )
649 coreconfigitem('experimental', 'xdiff',
652 coreconfigitem('experimental', 'xdiff',
650 default=False,
653 default=False,
651 )
654 )
652 coreconfigitem('extensions', '.*',
655 coreconfigitem('extensions', '.*',
653 default=None,
656 default=None,
654 generic=True,
657 generic=True,
655 )
658 )
656 coreconfigitem('extdata', '.*',
659 coreconfigitem('extdata', '.*',
657 default=None,
660 default=None,
658 generic=True,
661 generic=True,
659 )
662 )
660 coreconfigitem('format', 'chunkcachesize',
663 coreconfigitem('format', 'chunkcachesize',
661 default=None,
664 default=None,
662 )
665 )
663 coreconfigitem('format', 'dotencode',
666 coreconfigitem('format', 'dotencode',
664 default=True,
667 default=True,
665 )
668 )
666 coreconfigitem('format', 'generaldelta',
669 coreconfigitem('format', 'generaldelta',
667 default=False,
670 default=False,
668 )
671 )
669 coreconfigitem('format', 'manifestcachesize',
672 coreconfigitem('format', 'manifestcachesize',
670 default=None,
673 default=None,
671 )
674 )
672 coreconfigitem('format', 'maxchainlen',
675 coreconfigitem('format', 'maxchainlen',
673 default=dynamicdefault,
676 default=dynamicdefault,
674 )
677 )
675 coreconfigitem('format', 'obsstore-version',
678 coreconfigitem('format', 'obsstore-version',
676 default=None,
679 default=None,
677 )
680 )
678 coreconfigitem('format', 'sparse-revlog',
681 coreconfigitem('format', 'sparse-revlog',
679 default=True,
682 default=True,
680 )
683 )
681 coreconfigitem('format', 'usefncache',
684 coreconfigitem('format', 'usefncache',
682 default=True,
685 default=True,
683 )
686 )
684 coreconfigitem('format', 'usegeneraldelta',
687 coreconfigitem('format', 'usegeneraldelta',
685 default=True,
688 default=True,
686 )
689 )
687 coreconfigitem('format', 'usestore',
690 coreconfigitem('format', 'usestore',
688 default=True,
691 default=True,
689 )
692 )
690 coreconfigitem('format', 'internal-phase',
693 coreconfigitem('format', 'internal-phase',
691 default=False,
694 default=False,
692 )
695 )
693 coreconfigitem('fsmonitor', 'warn_when_unused',
696 coreconfigitem('fsmonitor', 'warn_when_unused',
694 default=True,
697 default=True,
695 )
698 )
696 coreconfigitem('fsmonitor', 'warn_update_file_count',
699 coreconfigitem('fsmonitor', 'warn_update_file_count',
697 default=50000,
700 default=50000,
698 )
701 )
699 coreconfigitem('help', br'hidden-command\..*',
702 coreconfigitem('help', br'hidden-command\..*',
700 default=False,
703 default=False,
701 generic=True,
704 generic=True,
702 )
705 )
703 coreconfigitem('help', br'hidden-topic\..*',
706 coreconfigitem('help', br'hidden-topic\..*',
704 default=False,
707 default=False,
705 generic=True,
708 generic=True,
706 )
709 )
707 coreconfigitem('hooks', '.*',
710 coreconfigitem('hooks', '.*',
708 default=dynamicdefault,
711 default=dynamicdefault,
709 generic=True,
712 generic=True,
710 )
713 )
711 coreconfigitem('hgweb-paths', '.*',
714 coreconfigitem('hgweb-paths', '.*',
712 default=list,
715 default=list,
713 generic=True,
716 generic=True,
714 )
717 )
715 coreconfigitem('hostfingerprints', '.*',
718 coreconfigitem('hostfingerprints', '.*',
716 default=list,
719 default=list,
717 generic=True,
720 generic=True,
718 )
721 )
719 coreconfigitem('hostsecurity', 'ciphers',
722 coreconfigitem('hostsecurity', 'ciphers',
720 default=None,
723 default=None,
721 )
724 )
722 coreconfigitem('hostsecurity', 'disabletls10warning',
725 coreconfigitem('hostsecurity', 'disabletls10warning',
723 default=False,
726 default=False,
724 )
727 )
725 coreconfigitem('hostsecurity', 'minimumprotocol',
728 coreconfigitem('hostsecurity', 'minimumprotocol',
726 default=dynamicdefault,
729 default=dynamicdefault,
727 )
730 )
728 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
731 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
729 default=dynamicdefault,
732 default=dynamicdefault,
730 generic=True,
733 generic=True,
731 )
734 )
732 coreconfigitem('hostsecurity', '.*:ciphers$',
735 coreconfigitem('hostsecurity', '.*:ciphers$',
733 default=dynamicdefault,
736 default=dynamicdefault,
734 generic=True,
737 generic=True,
735 )
738 )
736 coreconfigitem('hostsecurity', '.*:fingerprints$',
739 coreconfigitem('hostsecurity', '.*:fingerprints$',
737 default=list,
740 default=list,
738 generic=True,
741 generic=True,
739 )
742 )
740 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
743 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
741 default=None,
744 default=None,
742 generic=True,
745 generic=True,
743 )
746 )
744
747
745 coreconfigitem('http_proxy', 'always',
748 coreconfigitem('http_proxy', 'always',
746 default=False,
749 default=False,
747 )
750 )
748 coreconfigitem('http_proxy', 'host',
751 coreconfigitem('http_proxy', 'host',
749 default=None,
752 default=None,
750 )
753 )
751 coreconfigitem('http_proxy', 'no',
754 coreconfigitem('http_proxy', 'no',
752 default=list,
755 default=list,
753 )
756 )
754 coreconfigitem('http_proxy', 'passwd',
757 coreconfigitem('http_proxy', 'passwd',
755 default=None,
758 default=None,
756 )
759 )
757 coreconfigitem('http_proxy', 'user',
760 coreconfigitem('http_proxy', 'user',
758 default=None,
761 default=None,
759 )
762 )
760
763
761 coreconfigitem('http', 'timeout',
764 coreconfigitem('http', 'timeout',
762 default=None,
765 default=None,
763 )
766 )
764
767
765 coreconfigitem('logtoprocess', 'commandexception',
768 coreconfigitem('logtoprocess', 'commandexception',
766 default=None,
769 default=None,
767 )
770 )
768 coreconfigitem('logtoprocess', 'commandfinish',
771 coreconfigitem('logtoprocess', 'commandfinish',
769 default=None,
772 default=None,
770 )
773 )
771 coreconfigitem('logtoprocess', 'command',
774 coreconfigitem('logtoprocess', 'command',
772 default=None,
775 default=None,
773 )
776 )
774 coreconfigitem('logtoprocess', 'develwarn',
777 coreconfigitem('logtoprocess', 'develwarn',
775 default=None,
778 default=None,
776 )
779 )
777 coreconfigitem('logtoprocess', 'uiblocked',
780 coreconfigitem('logtoprocess', 'uiblocked',
778 default=None,
781 default=None,
779 )
782 )
780 coreconfigitem('merge', 'checkunknown',
783 coreconfigitem('merge', 'checkunknown',
781 default='abort',
784 default='abort',
782 )
785 )
783 coreconfigitem('merge', 'checkignored',
786 coreconfigitem('merge', 'checkignored',
784 default='abort',
787 default='abort',
785 )
788 )
786 coreconfigitem('experimental', 'merge.checkpathconflicts',
789 coreconfigitem('experimental', 'merge.checkpathconflicts',
787 default=False,
790 default=False,
788 )
791 )
789 coreconfigitem('merge', 'followcopies',
792 coreconfigitem('merge', 'followcopies',
790 default=True,
793 default=True,
791 )
794 )
792 coreconfigitem('merge', 'on-failure',
795 coreconfigitem('merge', 'on-failure',
793 default='continue',
796 default='continue',
794 )
797 )
795 coreconfigitem('merge', 'preferancestor',
798 coreconfigitem('merge', 'preferancestor',
796 default=lambda: ['*'],
799 default=lambda: ['*'],
797 )
800 )
798 coreconfigitem('merge', 'strict-capability-check',
801 coreconfigitem('merge', 'strict-capability-check',
799 default=False,
802 default=False,
800 )
803 )
801 coreconfigitem('merge-tools', '.*',
804 coreconfigitem('merge-tools', '.*',
802 default=None,
805 default=None,
803 generic=True,
806 generic=True,
804 )
807 )
805 coreconfigitem('merge-tools', br'.*\.args$',
808 coreconfigitem('merge-tools', br'.*\.args$',
806 default="$local $base $other",
809 default="$local $base $other",
807 generic=True,
810 generic=True,
808 priority=-1,
811 priority=-1,
809 )
812 )
810 coreconfigitem('merge-tools', br'.*\.binary$',
813 coreconfigitem('merge-tools', br'.*\.binary$',
811 default=False,
814 default=False,
812 generic=True,
815 generic=True,
813 priority=-1,
816 priority=-1,
814 )
817 )
815 coreconfigitem('merge-tools', br'.*\.check$',
818 coreconfigitem('merge-tools', br'.*\.check$',
816 default=list,
819 default=list,
817 generic=True,
820 generic=True,
818 priority=-1,
821 priority=-1,
819 )
822 )
820 coreconfigitem('merge-tools', br'.*\.checkchanged$',
823 coreconfigitem('merge-tools', br'.*\.checkchanged$',
821 default=False,
824 default=False,
822 generic=True,
825 generic=True,
823 priority=-1,
826 priority=-1,
824 )
827 )
825 coreconfigitem('merge-tools', br'.*\.executable$',
828 coreconfigitem('merge-tools', br'.*\.executable$',
826 default=dynamicdefault,
829 default=dynamicdefault,
827 generic=True,
830 generic=True,
828 priority=-1,
831 priority=-1,
829 )
832 )
830 coreconfigitem('merge-tools', br'.*\.fixeol$',
833 coreconfigitem('merge-tools', br'.*\.fixeol$',
831 default=False,
834 default=False,
832 generic=True,
835 generic=True,
833 priority=-1,
836 priority=-1,
834 )
837 )
835 coreconfigitem('merge-tools', br'.*\.gui$',
838 coreconfigitem('merge-tools', br'.*\.gui$',
836 default=False,
839 default=False,
837 generic=True,
840 generic=True,
838 priority=-1,
841 priority=-1,
839 )
842 )
840 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
843 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
841 default='basic',
844 default='basic',
842 generic=True,
845 generic=True,
843 priority=-1,
846 priority=-1,
844 )
847 )
845 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
848 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
846 default=dynamicdefault, # take from ui.mergemarkertemplate
849 default=dynamicdefault, # take from ui.mergemarkertemplate
847 generic=True,
850 generic=True,
848 priority=-1,
851 priority=-1,
849 )
852 )
850 coreconfigitem('merge-tools', br'.*\.priority$',
853 coreconfigitem('merge-tools', br'.*\.priority$',
851 default=0,
854 default=0,
852 generic=True,
855 generic=True,
853 priority=-1,
856 priority=-1,
854 )
857 )
855 coreconfigitem('merge-tools', br'.*\.premerge$',
858 coreconfigitem('merge-tools', br'.*\.premerge$',
856 default=dynamicdefault,
859 default=dynamicdefault,
857 generic=True,
860 generic=True,
858 priority=-1,
861 priority=-1,
859 )
862 )
860 coreconfigitem('merge-tools', br'.*\.symlink$',
863 coreconfigitem('merge-tools', br'.*\.symlink$',
861 default=False,
864 default=False,
862 generic=True,
865 generic=True,
863 priority=-1,
866 priority=-1,
864 )
867 )
865 coreconfigitem('pager', 'attend-.*',
868 coreconfigitem('pager', 'attend-.*',
866 default=dynamicdefault,
869 default=dynamicdefault,
867 generic=True,
870 generic=True,
868 )
871 )
869 coreconfigitem('pager', 'ignore',
872 coreconfigitem('pager', 'ignore',
870 default=list,
873 default=list,
871 )
874 )
872 coreconfigitem('pager', 'pager',
875 coreconfigitem('pager', 'pager',
873 default=dynamicdefault,
876 default=dynamicdefault,
874 )
877 )
875 coreconfigitem('patch', 'eol',
878 coreconfigitem('patch', 'eol',
876 default='strict',
879 default='strict',
877 )
880 )
878 coreconfigitem('patch', 'fuzz',
881 coreconfigitem('patch', 'fuzz',
879 default=2,
882 default=2,
880 )
883 )
881 coreconfigitem('paths', 'default',
884 coreconfigitem('paths', 'default',
882 default=None,
885 default=None,
883 )
886 )
884 coreconfigitem('paths', 'default-push',
887 coreconfigitem('paths', 'default-push',
885 default=None,
888 default=None,
886 )
889 )
887 coreconfigitem('paths', '.*',
890 coreconfigitem('paths', '.*',
888 default=None,
891 default=None,
889 generic=True,
892 generic=True,
890 )
893 )
891 coreconfigitem('phases', 'checksubrepos',
894 coreconfigitem('phases', 'checksubrepos',
892 default='follow',
895 default='follow',
893 )
896 )
894 coreconfigitem('phases', 'new-commit',
897 coreconfigitem('phases', 'new-commit',
895 default='draft',
898 default='draft',
896 )
899 )
897 coreconfigitem('phases', 'publish',
900 coreconfigitem('phases', 'publish',
898 default=True,
901 default=True,
899 )
902 )
900 coreconfigitem('profiling', 'enabled',
903 coreconfigitem('profiling', 'enabled',
901 default=False,
904 default=False,
902 )
905 )
903 coreconfigitem('profiling', 'format',
906 coreconfigitem('profiling', 'format',
904 default='text',
907 default='text',
905 )
908 )
906 coreconfigitem('profiling', 'freq',
909 coreconfigitem('profiling', 'freq',
907 default=1000,
910 default=1000,
908 )
911 )
909 coreconfigitem('profiling', 'limit',
912 coreconfigitem('profiling', 'limit',
910 default=30,
913 default=30,
911 )
914 )
912 coreconfigitem('profiling', 'nested',
915 coreconfigitem('profiling', 'nested',
913 default=0,
916 default=0,
914 )
917 )
915 coreconfigitem('profiling', 'output',
918 coreconfigitem('profiling', 'output',
916 default=None,
919 default=None,
917 )
920 )
918 coreconfigitem('profiling', 'showmax',
921 coreconfigitem('profiling', 'showmax',
919 default=0.999,
922 default=0.999,
920 )
923 )
921 coreconfigitem('profiling', 'showmin',
924 coreconfigitem('profiling', 'showmin',
922 default=dynamicdefault,
925 default=dynamicdefault,
923 )
926 )
924 coreconfigitem('profiling', 'sort',
927 coreconfigitem('profiling', 'sort',
925 default='inlinetime',
928 default='inlinetime',
926 )
929 )
927 coreconfigitem('profiling', 'statformat',
930 coreconfigitem('profiling', 'statformat',
928 default='hotpath',
931 default='hotpath',
929 )
932 )
930 coreconfigitem('profiling', 'time-track',
933 coreconfigitem('profiling', 'time-track',
931 default=dynamicdefault,
934 default=dynamicdefault,
932 )
935 )
933 coreconfigitem('profiling', 'type',
936 coreconfigitem('profiling', 'type',
934 default='stat',
937 default='stat',
935 )
938 )
936 coreconfigitem('progress', 'assume-tty',
939 coreconfigitem('progress', 'assume-tty',
937 default=False,
940 default=False,
938 )
941 )
939 coreconfigitem('progress', 'changedelay',
942 coreconfigitem('progress', 'changedelay',
940 default=1,
943 default=1,
941 )
944 )
942 coreconfigitem('progress', 'clear-complete',
945 coreconfigitem('progress', 'clear-complete',
943 default=True,
946 default=True,
944 )
947 )
945 coreconfigitem('progress', 'debug',
948 coreconfigitem('progress', 'debug',
946 default=False,
949 default=False,
947 )
950 )
948 coreconfigitem('progress', 'delay',
951 coreconfigitem('progress', 'delay',
949 default=3,
952 default=3,
950 )
953 )
951 coreconfigitem('progress', 'disable',
954 coreconfigitem('progress', 'disable',
952 default=False,
955 default=False,
953 )
956 )
954 coreconfigitem('progress', 'estimateinterval',
957 coreconfigitem('progress', 'estimateinterval',
955 default=60.0,
958 default=60.0,
956 )
959 )
957 coreconfigitem('progress', 'format',
960 coreconfigitem('progress', 'format',
958 default=lambda: ['topic', 'bar', 'number', 'estimate'],
961 default=lambda: ['topic', 'bar', 'number', 'estimate'],
959 )
962 )
960 coreconfigitem('progress', 'refresh',
963 coreconfigitem('progress', 'refresh',
961 default=0.1,
964 default=0.1,
962 )
965 )
963 coreconfigitem('progress', 'width',
966 coreconfigitem('progress', 'width',
964 default=dynamicdefault,
967 default=dynamicdefault,
965 )
968 )
966 coreconfigitem('push', 'pushvars.server',
969 coreconfigitem('push', 'pushvars.server',
967 default=False,
970 default=False,
968 )
971 )
969 coreconfigitem('rewrite', 'backup-bundle',
972 coreconfigitem('rewrite', 'backup-bundle',
970 default=True,
973 default=True,
971 alias=[('ui', 'history-editing-backup')],
974 alias=[('ui', 'history-editing-backup')],
972 )
975 )
973 coreconfigitem('rewrite', 'update-timestamp',
976 coreconfigitem('rewrite', 'update-timestamp',
974 default=False,
977 default=False,
975 )
978 )
976 coreconfigitem('storage', 'new-repo-backend',
979 coreconfigitem('storage', 'new-repo-backend',
977 default='revlogv1',
980 default='revlogv1',
978 )
981 )
979 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
982 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
980 default=True,
983 default=True,
981 alias=[('format', 'aggressivemergedeltas')],
984 alias=[('format', 'aggressivemergedeltas')],
982 )
985 )
983 coreconfigitem('server', 'bookmarks-pushkey-compat',
986 coreconfigitem('server', 'bookmarks-pushkey-compat',
984 default=True,
987 default=True,
985 )
988 )
986 coreconfigitem('server', 'bundle1',
989 coreconfigitem('server', 'bundle1',
987 default=True,
990 default=True,
988 )
991 )
989 coreconfigitem('server', 'bundle1gd',
992 coreconfigitem('server', 'bundle1gd',
990 default=None,
993 default=None,
991 )
994 )
992 coreconfigitem('server', 'bundle1.pull',
995 coreconfigitem('server', 'bundle1.pull',
993 default=None,
996 default=None,
994 )
997 )
995 coreconfigitem('server', 'bundle1gd.pull',
998 coreconfigitem('server', 'bundle1gd.pull',
996 default=None,
999 default=None,
997 )
1000 )
998 coreconfigitem('server', 'bundle1.push',
1001 coreconfigitem('server', 'bundle1.push',
999 default=None,
1002 default=None,
1000 )
1003 )
1001 coreconfigitem('server', 'bundle1gd.push',
1004 coreconfigitem('server', 'bundle1gd.push',
1002 default=None,
1005 default=None,
1003 )
1006 )
1004 coreconfigitem('server', 'bundle2.stream',
1007 coreconfigitem('server', 'bundle2.stream',
1005 default=True,
1008 default=True,
1006 alias=[('experimental', 'bundle2.stream')]
1009 alias=[('experimental', 'bundle2.stream')]
1007 )
1010 )
1008 coreconfigitem('server', 'compressionengines',
1011 coreconfigitem('server', 'compressionengines',
1009 default=list,
1012 default=list,
1010 )
1013 )
1011 coreconfigitem('server', 'concurrent-push-mode',
1014 coreconfigitem('server', 'concurrent-push-mode',
1012 default='strict',
1015 default='strict',
1013 )
1016 )
1014 coreconfigitem('server', 'disablefullbundle',
1017 coreconfigitem('server', 'disablefullbundle',
1015 default=False,
1018 default=False,
1016 )
1019 )
1017 coreconfigitem('server', 'maxhttpheaderlen',
1020 coreconfigitem('server', 'maxhttpheaderlen',
1018 default=1024,
1021 default=1024,
1019 )
1022 )
coreconfigitem('server', 'pullbundle',
    default=False,
)
coreconfigitem('server', 'preferuncompressed',
    default=False,
)
coreconfigitem('server', 'streamunbundle',
    default=False,
)
coreconfigitem('server', 'uncompressed',
    default=True,
)
coreconfigitem('server', 'uncompressedallowsecret',
    default=False,
)
coreconfigitem('server', 'validate',
    default=False,
)
coreconfigitem('server', 'zliblevel',
    default=-1,
)
coreconfigitem('server', 'zstdlevel',
    default=3,
)
coreconfigitem('share', 'pool',
    default=None,
)
coreconfigitem('share', 'poolnaming',
    default='identity',
)
coreconfigitem('smtp', 'host',
    default=None,
)
coreconfigitem('smtp', 'local_hostname',
    default=None,
)
coreconfigitem('smtp', 'password',
    default=None,
)
coreconfigitem('smtp', 'port',
    default=dynamicdefault,
)
coreconfigitem('smtp', 'tls',
    default='none',
)
coreconfigitem('smtp', 'username',
    default=None,
)
coreconfigitem('sparse', 'missingwarning',
    default=True,
)
coreconfigitem('subrepos', 'allowed',
    default=dynamicdefault, # to make backporting simpler
)
coreconfigitem('subrepos', 'hg:allowed',
    default=dynamicdefault,
)
coreconfigitem('subrepos', 'git:allowed',
    default=dynamicdefault,
)
coreconfigitem('subrepos', 'svn:allowed',
    default=dynamicdefault,
)
coreconfigitem('templates', '.*',
    default=None,
    generic=True,
)
coreconfigitem('templateconfig', '.*',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem('trusted', 'groups',
    default=list,
)
coreconfigitem('trusted', 'users',
    default=list,
)
coreconfigitem('ui', '_usedassubrepo',
    default=False,
)
coreconfigitem('ui', 'allowemptycommit',
    default=False,
)
coreconfigitem('ui', 'archivemeta',
    default=True,
)
coreconfigitem('ui', 'askusername',
    default=False,
)
coreconfigitem('ui', 'clonebundlefallback',
    default=False,
)
coreconfigitem('ui', 'clonebundleprefers',
    default=list,
)
coreconfigitem('ui', 'clonebundles',
    default=True,
)
coreconfigitem('ui', 'color',
    default='auto',
)
coreconfigitem('ui', 'commitsubrepos',
    default=False,
)
coreconfigitem('ui', 'debug',
    default=False,
)
coreconfigitem('ui', 'debugger',
    default=None,
)
coreconfigitem('ui', 'editor',
    default=dynamicdefault,
)
coreconfigitem('ui', 'fallbackencoding',
    default=None,
)
coreconfigitem('ui', 'forcecwd',
    default=None,
)
coreconfigitem('ui', 'forcemerge',
    default=None,
)
coreconfigitem('ui', 'formatdebug',
    default=False,
)
coreconfigitem('ui', 'formatjson',
    default=False,
)
coreconfigitem('ui', 'formatted',
    default=None,
)
coreconfigitem('ui', 'graphnodetemplate',
    default=None,
)
coreconfigitem('ui', 'interactive',
    default=None,
)
coreconfigitem('ui', 'interface',
    default=None,
)
coreconfigitem('ui', 'interface.chunkselector',
    default=None,
)
coreconfigitem('ui', 'large-file-limit',
    default=10000000,
)
coreconfigitem('ui', 'logblockedtimes',
    default=False,
)
coreconfigitem('ui', 'logtemplate',
    default=None,
)
coreconfigitem('ui', 'merge',
    default=None,
)
coreconfigitem('ui', 'mergemarkers',
    default='basic',
)
coreconfigitem('ui', 'mergemarkertemplate',
    default=('{node|short} '
             '{ifeq(tags, "tip", "", '
             'ifeq(tags, "", "", "{tags} "))}'
             '{if(bookmarks, "{bookmarks} ")}'
             '{ifeq(branch, "default", "", "{branch} ")}'
             '- {author|user}: {desc|firstline}')
)
coreconfigitem('ui', 'message-output',
    default='stdio',
)
coreconfigitem('ui', 'nontty',
    default=False,
)
coreconfigitem('ui', 'origbackuppath',
    default=None,
)
coreconfigitem('ui', 'paginate',
    default=True,
)
coreconfigitem('ui', 'patch',
    default=None,
)
coreconfigitem('ui', 'pre-merge-tool-output-template',
    default=None,
)
coreconfigitem('ui', 'portablefilenames',
    default='warn',
)
coreconfigitem('ui', 'promptecho',
    default=False,
)
coreconfigitem('ui', 'quiet',
    default=False,
)
coreconfigitem('ui', 'quietbookmarkmove',
    default=False,
)
coreconfigitem('ui', 'relative-paths',
    default='legacy',
)
coreconfigitem('ui', 'remotecmd',
    default='hg',
)
coreconfigitem('ui', 'report_untrusted',
    default=True,
)
coreconfigitem('ui', 'rollback',
    default=True,
)
coreconfigitem('ui', 'signal-safe-lock',
    default=True,
)
coreconfigitem('ui', 'slash',
    default=False,
)
coreconfigitem('ui', 'ssh',
    default='ssh',
)
coreconfigitem('ui', 'ssherrorhint',
    default=None,
)
coreconfigitem('ui', 'statuscopies',
    default=False,
)
coreconfigitem('ui', 'strict',
    default=False,
)
coreconfigitem('ui', 'style',
    default='',
)
coreconfigitem('ui', 'supportcontact',
    default=None,
)
coreconfigitem('ui', 'textwidth',
    default=78,
)
coreconfigitem('ui', 'timeout',
    default='600',
)
coreconfigitem('ui', 'timeout.warn',
    default=0,
)
coreconfigitem('ui', 'traceback',
    default=False,
)
coreconfigitem('ui', 'tweakdefaults',
    default=False,
)
coreconfigitem('ui', 'username',
    alias=[('ui', 'user')]
)
coreconfigitem('ui', 'verbose',
    default=False,
)
coreconfigitem('verify', 'skipflags',
    default=None,
)
coreconfigitem('web', 'allowbz2',
    default=False,
)
coreconfigitem('web', 'allowgz',
    default=False,
)
coreconfigitem('web', 'allow-pull',
    alias=[('web', 'allowpull')],
    default=True,
)
coreconfigitem('web', 'allow-push',
    alias=[('web', 'allow_push')],
    default=list,
)
coreconfigitem('web', 'allowzip',
    default=False,
)
coreconfigitem('web', 'archivesubrepos',
    default=False,
)
coreconfigitem('web', 'cache',
    default=True,
)
coreconfigitem('web', 'comparisoncontext',
    default=5,
)
coreconfigitem('web', 'contact',
    default=None,
)
coreconfigitem('web', 'deny_push',
    default=list,
)
coreconfigitem('web', 'guessmime',
    default=False,
)
coreconfigitem('web', 'hidden',
    default=False,
)
coreconfigitem('web', 'labels',
    default=list,
)
coreconfigitem('web', 'logoimg',
    default='hglogo.png',
)
coreconfigitem('web', 'logourl',
    default='https://mercurial-scm.org/',
)
coreconfigitem('web', 'accesslog',
    default='-',
)
coreconfigitem('web', 'address',
    default='',
)
coreconfigitem('web', 'allow-archive',
    alias=[('web', 'allow_archive')],
    default=list,
)
coreconfigitem('web', 'allow_read',
    default=list,
)
coreconfigitem('web', 'baseurl',
    default=None,
)
coreconfigitem('web', 'cacerts',
    default=None,
)
coreconfigitem('web', 'certificate',
    default=None,
)
coreconfigitem('web', 'collapse',
    default=False,
)
coreconfigitem('web', 'csp',
    default=None,
)
coreconfigitem('web', 'deny_read',
    default=list,
)
coreconfigitem('web', 'descend',
    default=True,
)
coreconfigitem('web', 'description',
    default="",
)
coreconfigitem('web', 'encoding',
    default=lambda: encoding.encoding,
)
coreconfigitem('web', 'errorlog',
    default='-',
)
coreconfigitem('web', 'ipv6',
    default=False,
)
coreconfigitem('web', 'maxchanges',
    default=10,
)
coreconfigitem('web', 'maxfiles',
    default=10,
)
coreconfigitem('web', 'maxshortchanges',
    default=60,
)
coreconfigitem('web', 'motd',
    default='',
)
coreconfigitem('web', 'name',
    default=dynamicdefault,
)
coreconfigitem('web', 'port',
    default=8000,
)
coreconfigitem('web', 'prefix',
    default='',
)
coreconfigitem('web', 'push_ssl',
    default=True,
)
coreconfigitem('web', 'refreshinterval',
    default=20,
)
coreconfigitem('web', 'server-header',
    default=None,
)
coreconfigitem('web', 'static',
    default=None,
)
coreconfigitem('web', 'staticurl',
    default=None,
)
coreconfigitem('web', 'stripes',
    default=1,
)
coreconfigitem('web', 'style',
    default='paper',
)
coreconfigitem('web', 'templates',
    default=None,
)
coreconfigitem('web', 'view',
    default='served',
)
coreconfigitem('worker', 'backgroundclose',
    default=dynamicdefault,
)
# Windows defaults to a limit of 512 open files. A buffer of 128
# should give us enough headway.
coreconfigitem('worker', 'backgroundclosemaxqueue',
    default=384,
)
coreconfigitem('worker', 'backgroundcloseminfilecount',
    default=2048,
)
coreconfigitem('worker', 'backgroundclosethreadcount',
    default=4,
)
coreconfigitem('worker', 'enabled',
    default=True,
)
coreconfigitem('worker', 'numcpus',
    default=None,
)

# Rebase related configuration moved to core because other extensions are
# doing strange things. For example, shelve imports the extension to reuse
# some bits without formally loading it.
coreconfigitem('commands', 'rebase.requiredest',
    default=False,
)
coreconfigitem('experimental', 'rebaseskipobsolete',
    default=True,
)
coreconfigitem('rebase', 'singletransaction',
    default=False,
)
coreconfigitem('rebase', 'experimental.inmemory',
    default=False,
)
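The declarations above only register names and defaults. A minimal sketch of how they are consumed at runtime (the helper name `_examplereads` is hypothetical and not part of this changeset): each typed `ui` accessor falls back to the declared default when the user has set nothing.

def _examplereads(ui):
    # each read falls back to the default declared above when unset
    requiredest = ui.configbool('commands', 'rebase.requiredest')  # False
    zstdlevel = ui.configint('server', 'zstdlevel')                # 3
    logourl = ui.config('web', 'logourl')      # 'https://mercurial-scm.org/'
    return requiredest, zstdlevel, logourl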
@@ -1,1891 +1,1903 b''
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod(r'parsers')

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

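# Illustrative sketch, not part of this changeset: status behaves like a
# plain 7-tuple but exposes named accessors (the file names and the helper
# name below are made up).
def _status_example():
    st = status(['changed.txt'], [], [], [], [], [], ['clean.txt'])
    assert st.modified == st[0] == ['changed.txt']
    assert st.clean == ['clean.txt']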
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1

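# Illustrative sketch, not part of this changeset: dispatch-style callers wrap
# the command body in callcatch() so Mercurial errors become a message plus an
# exit code instead of a traceback ("_callcatch_example" is a made-up name).
def _callcatch_example(ui, repo):
    def body():
        raise error.Abort(_('demonstration failure'))
    # prints "abort: demonstration failure" and returns -1
    return callcatch(ui, body)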
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

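# Illustrative sketch, not part of this changeset: commands that add files run
# each new name through the auditor; a name differing only in case from an
# already-seen or tracked file warns (or aborts when abort=True). The names
# below are made up.
def _casecollision_example(repo):
    audit = casecollisionauditor(repo.ui, False, repo.dirstate)
    audit('README')   # fine if no differently-cased 'readme' is tracked
    audit('ReadMe')   # would warn about a possible case-folding collision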
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

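# Illustrative sketch, not part of this changeset: enumerate repositories under
# a directory, e.g. for hgwebdir-style publishing (the path and helper name are
# made up).
def _walkrepos_example():
    return [p for p in walkrepos('/srv/hg', followsym=True, recurse=True)]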
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    if (prefix.startswith('x') and
            repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False

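# Illustrative sketch, not part of this changeset, assuming a repository with a
# few thousand revisions: only strings that could also be printed as a revision
# number need hex-prefix disambiguation.
def _mayberevnum_example(repo):
    assert mayberevnum(repo, b'123')            # plausible revision number
    assert not mayberevnum(repo, b'0123')       # leading zero is never a revnum
    assert not mayberevnum(repo, b'9999999999') # larger than the tip revision
    assert not mayberevnum(repo, b'cafe')       # not an integer at all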
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength=max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

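# Illustrative sketch, not part of this changeset: template keywords such as
# {shortest(node)} ultimately rely on this helper; the returned prefix is
# whatever length is unambiguous in the repository, but at least minlength.
def _shortest_example(repo):
    node = repo['tip'].node()
    return shortesthexnodeidprefix(repo, node, minlength=4)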
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

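# Illustrative sketch, not part of this changeset: a single symbol resolves to
# a changectx; full revset expressions such as "max(public())" are not accepted
# here and end up raising RepoLookupError ('my-bookmark' is a made-up name).
def _revsymbol_example(repo):
    tip = revsymbol(repo, 'tip')           # changectx for the tip revision
    book = revsymbol(repo, 'my-bookmark')  # bookmarks go through the name interface
    return tip, book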
625 def _filterederror(repo, changeid):
625 def _filterederror(repo, changeid):
626 """build an exception to be raised about a filtered changeid
626 """build an exception to be raised about a filtered changeid
627
627
628 This is extracted in a function to help extensions (eg: evolve) to
628 This is extracted in a function to help extensions (eg: evolve) to
629 experiment with various message variants."""
629 experiment with various message variants."""
630 if repo.filtername.startswith('visible'):
630 if repo.filtername.startswith('visible'):
631
631
632 # Check if the changeset is obsolete
632 # Check if the changeset is obsolete
633 unfilteredrepo = repo.unfiltered()
633 unfilteredrepo = repo.unfiltered()
634 ctx = revsymbol(unfilteredrepo, changeid)
634 ctx = revsymbol(unfilteredrepo, changeid)
635
635
636 # If the changeset is obsolete, enrich the message with the reason
636 # If the changeset is obsolete, enrich the message with the reason
637 # that made this changeset not visible
637 # that made this changeset not visible
638 if ctx.obsolete():
638 if ctx.obsolete():
639 msg = obsutil._getfilteredreason(repo, changeid, ctx)
639 msg = obsutil._getfilteredreason(repo, changeid, ctx)
640 else:
640 else:
641 msg = _("hidden revision '%s'") % changeid
641 msg = _("hidden revision '%s'") % changeid
642
642
643 hint = _('use --hidden to access hidden revisions')
643 hint = _('use --hidden to access hidden revisions')
644
644
645 return error.FilteredRepoLookupError(msg, hint=hint)
645 return error.FilteredRepoLookupError(msg, hint=hint)
646 msg = _("filtered revision '%s' (not in '%s' subset)")
646 msg = _("filtered revision '%s' (not in '%s' subset)")
647 msg %= (changeid, repo.filtername)
647 msg %= (changeid, repo.filtername)
648 return error.FilteredRepoLookupError(msg)
648 return error.FilteredRepoLookupError(msg)
649
649
650 def revsingle(repo, revspec, default='.', localalias=None):
650 def revsingle(repo, revspec, default='.', localalias=None):
651 if not revspec and revspec != 0:
651 if not revspec and revspec != 0:
652 return repo[default]
652 return repo[default]
653
653
654 l = revrange(repo, [revspec], localalias=localalias)
654 l = revrange(repo, [revspec], localalias=localalias)
655 if not l:
655 if not l:
656 raise error.Abort(_('empty revision set'))
656 raise error.Abort(_('empty revision set'))
657 return repo[l.last()]
657 return repo[l.last()]
658
658
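A sketch of the typical revsingle() call pattern (assuming `repo` as above): the spec is a full revset, only the last member of the result is kept, and `default` is used when the spec is empty:

from mercurial import scmutil

ctx = scmutil.revsingle(repo, '0:5')      # multi-member set -> last member, rev 5
parent = scmutil.revsingle(repo, '')      # empty spec falls back to repo['.']
repo.ui.write('%s\n' % ctx.hex())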
659 def _pairspec(revspec):
659 def _pairspec(revspec):
660 tree = revsetlang.parse(revspec)
660 tree = revsetlang.parse(revspec)
661 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
661 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
662
662
663 def revpair(repo, revs):
663 def revpair(repo, revs):
664 if not revs:
664 if not revs:
665 return repo['.'], repo[None]
665 return repo['.'], repo[None]
666
666
667 l = revrange(repo, revs)
667 l = revrange(repo, revs)
668
668
669 if not l:
669 if not l:
670 raise error.Abort(_('empty revision range'))
670 raise error.Abort(_('empty revision range'))
671
671
672 first = l.first()
672 first = l.first()
673 second = l.last()
673 second = l.last()
674
674
675 if (first == second and len(revs) >= 2
675 if (first == second and len(revs) >= 2
676 and not all(revrange(repo, [r]) for r in revs)):
676 and not all(revrange(repo, [r]) for r in revs)):
677 raise error.Abort(_('empty revision on one side of range'))
677 raise error.Abort(_('empty revision on one side of range'))
678
678
679 # if top-level is range expression, the result must always be a pair
679 # if top-level is range expression, the result must always be a pair
680 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
680 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
681 return repo[first], repo[None]
681 return repo[first], repo[None]
682
682
683 return repo[first], repo[second]
683 return repo[first], repo[second]
684
684
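A sketch of the --rev handling pattern that commands such as status and diff follow (names assumed as above); with no revisions given, the pair is ('.', working directory):

from mercurial import scmutil

ctx1, ctx2 = scmutil.revpair(repo, ['0:tip'])   # a range spec yields both ends
base, wctx = scmutil.revpair(repo, [])          # defaults to repo['.'], repo[None]
st = ctx1.status(ctx2)                          # e.g. compare the two contexts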
685 def revrange(repo, specs, localalias=None):
685 def revrange(repo, specs, localalias=None):
686 """Execute 1 to many revsets and return the union.
686 """Execute 1 to many revsets and return the union.
687
687
688 This is the preferred mechanism for executing revsets using user-specified
688 This is the preferred mechanism for executing revsets using user-specified
689 config options, such as revset aliases.
689 config options, such as revset aliases.
690
690
691 The revsets specified by ``specs`` will be executed via a chained ``OR``
691 The revsets specified by ``specs`` will be executed via a chained ``OR``
692 expression. If ``specs`` is empty, an empty result is returned.
692 expression. If ``specs`` is empty, an empty result is returned.
693
693
694 ``specs`` can contain integers, in which case they are assumed to be
694 ``specs`` can contain integers, in which case they are assumed to be
695 revision numbers.
695 revision numbers.
696
696
697 It is assumed the revsets are already formatted. If you have arguments
697 It is assumed the revsets are already formatted. If you have arguments
698 that need to be expanded in the revset, call ``revsetlang.formatspec()``
698 that need to be expanded in the revset, call ``revsetlang.formatspec()``
699 and pass the result as an element of ``specs``.
699 and pass the result as an element of ``specs``.
700
700
701 Specifying a single revset is allowed.
701 Specifying a single revset is allowed.
702
702
703 Returns a ``revset.abstractsmartset`` which is a list-like interface over
703 Returns a ``revset.abstractsmartset`` which is a list-like interface over
704 integer revisions.
704 integer revisions.
705 """
705 """
706 allspecs = []
706 allspecs = []
707 for spec in specs:
707 for spec in specs:
708 if isinstance(spec, int):
708 if isinstance(spec, int):
709 spec = revsetlang.formatspec('%d', spec)
709 spec = revsetlang.formatspec('%d', spec)
710 allspecs.append(spec)
710 allspecs.append(spec)
711 return repo.anyrevs(allspecs, user=True, localalias=localalias)
711 return repo.anyrevs(allspecs, user=True, localalias=localalias)
712
712
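A sketch of the formatspec() contract mentioned in the docstring: untrusted values are escaped first, then every spec is OR-ed together by revrange() (assumes `repo` and a `username` variable holding user input):

from mercurial import revsetlang, scmutil

spec = revsetlang.formatspec('user(%s)', username)   # escape untrusted input
revs = scmutil.revrange(repo, [spec, 'head() and not closed()'])
for rev in revs:                                     # smartset yields revision numbers
    repo.ui.write('%d\n' % rev)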
713 def meaningfulparents(repo, ctx):
713 def meaningfulparents(repo, ctx):
714 """Return list of meaningful (or all if debug) parentrevs for rev.
714 """Return list of meaningful (or all if debug) parentrevs for rev.
715
715
716 For merges (two non-nullrev revisions) both parents are meaningful.
716 For merges (two non-nullrev revisions) both parents are meaningful.
717 Otherwise the first parent revision is considered meaningful if it
717 Otherwise the first parent revision is considered meaningful if it
718 is not the preceding revision.
718 is not the preceding revision.
719 """
719 """
720 parents = ctx.parents()
720 parents = ctx.parents()
721 if len(parents) > 1:
721 if len(parents) > 1:
722 return parents
722 return parents
723 if repo.ui.debugflag:
723 if repo.ui.debugflag:
724 return [parents[0], repo[nullrev]]
724 return [parents[0], repo[nullrev]]
725 if parents[0].rev() >= intrev(ctx) - 1:
725 if parents[0].rev() >= intrev(ctx) - 1:
726 return []
726 return []
727 return parents
727 return parents
728
728
729 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
729 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
730 """Return a function that produced paths for presenting to the user.
730 """Return a function that produced paths for presenting to the user.
731
731
732 The returned function takes a repo-relative path and produces a path
732 The returned function takes a repo-relative path and produces a path
733 that can be presented in the UI.
733 that can be presented in the UI.
734
734
735 Depending on the value of ui.relative-paths, either a repo-relative or
735 Depending on the value of ui.relative-paths, either a repo-relative or
736 cwd-relative path will be produced.
736 cwd-relative path will be produced.
737
737
738 legacyrelativevalue is the value to use if ui.relative-paths=legacy
738 legacyrelativevalue is the value to use if ui.relative-paths=legacy
739
739
740 If forcerelativevalue is not None, then that value will be used regardless
740 If forcerelativevalue is not None, then that value will be used regardless
741 of what ui.relative-paths is set to.
741 of what ui.relative-paths is set to.
742 """
742 """
743 if forcerelativevalue is not None:
743 if forcerelativevalue is not None:
744 relative = forcerelativevalue
744 relative = forcerelativevalue
745 else:
745 else:
746 config = repo.ui.config('ui', 'relative-paths')
746 config = repo.ui.config('ui', 'relative-paths')
747 if config == 'legacy':
747 if config == 'legacy':
748 relative = legacyrelativevalue
748 relative = legacyrelativevalue
749 else:
749 else:
750 relative = stringutil.parsebool(config)
750 relative = stringutil.parsebool(config)
751 if relative is None:
751 if relative is None:
752 raise error.ConfigError(
752 raise error.ConfigError(
753 _("ui.relative-paths is not a boolean ('%s')") % config)
753 _("ui.relative-paths is not a boolean ('%s')") % config)
754
754
755 if relative:
755 if relative:
756 cwd = repo.getcwd()
756 cwd = repo.getcwd()
757 pathto = repo.pathto
757 pathto = repo.pathto
758 return lambda f: pathto(f, cwd)
758 return lambda f: pathto(f, cwd)
759 elif repo.ui.configbool('ui', 'slash'):
759 elif repo.ui.configbool('ui', 'slash'):
760 return lambda f: f
760 return lambda f: f
761 else:
761 else:
762 return util.localpath
762 return util.localpath
763
763
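Sketch of the intended call pattern: compute the formatter once per command invocation, then feed it repo-relative paths when printing (assumes `repo` and a changectx `ctx`):

from mercurial import scmutil

uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
for f in ctx.files():                       # repo-relative names
    repo.ui.status('%s\n' % uipathfn(f))    # shown relative or repo-rooted per config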
764 def subdiruipathfn(subpath, uipathfn):
764 def subdiruipathfn(subpath, uipathfn):
765 '''Create a new uipathfn that treats the file as relative to subpath.'''
765 '''Create a new uipathfn that treats the file as relative to subpath.'''
766 return lambda f: uipathfn(posixpath.join(subpath, f))
766 return lambda f: uipathfn(posixpath.join(subpath, f))
767
767
768 def anypats(pats, opts):
768 def anypats(pats, opts):
769 '''Checks if any patterns, including --include and --exclude were given.
769 '''Checks if any patterns, including --include and --exclude were given.
770
770
771 Some commands (e.g. addremove) use this condition for deciding whether to
771 Some commands (e.g. addremove) use this condition for deciding whether to
772 print absolute or relative paths.
772 print absolute or relative paths.
773 '''
773 '''
774 return bool(pats or opts.get('include') or opts.get('exclude'))
774 return bool(pats or opts.get('include') or opts.get('exclude'))
775
775
776 def expandpats(pats):
776 def expandpats(pats):
777 '''Expand bare globs when running on windows.
777 '''Expand bare globs when running on windows.
778 On posix we assume it has already been done by sh.'''
778 On posix we assume it has already been done by sh.'''
779 if not util.expandglobs:
779 if not util.expandglobs:
780 return list(pats)
780 return list(pats)
781 ret = []
781 ret = []
782 for kindpat in pats:
782 for kindpat in pats:
783 kind, pat = matchmod._patsplit(kindpat, None)
783 kind, pat = matchmod._patsplit(kindpat, None)
784 if kind is None:
784 if kind is None:
785 try:
785 try:
786 globbed = glob.glob(pat)
786 globbed = glob.glob(pat)
787 except re.error:
787 except re.error:
788 globbed = [pat]
788 globbed = [pat]
789 if globbed:
789 if globbed:
790 ret.extend(globbed)
790 ret.extend(globbed)
791 continue
791 continue
792 ret.append(kindpat)
792 ret.append(kindpat)
793 return ret
793 return ret
794
794
795 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
795 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
796 badfn=None):
796 badfn=None):
797 '''Return a matcher and the patterns that were used.
797 '''Return a matcher and the patterns that were used.
798 The matcher will warn about bad matches, unless an alternate badfn callback
798 The matcher will warn about bad matches, unless an alternate badfn callback
799 is provided.'''
799 is provided.'''
800 if opts is None:
800 if opts is None:
801 opts = {}
801 opts = {}
802 if not globbed and default == 'relpath':
802 if not globbed and default == 'relpath':
803 pats = expandpats(pats or [])
803 pats = expandpats(pats or [])
804
804
805 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
805 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
806 def bad(f, msg):
806 def bad(f, msg):
807 ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
807 ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
808
808
809 if badfn is None:
809 if badfn is None:
810 badfn = bad
810 badfn = bad
811
811
812 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
812 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
813 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
813 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
814
814
815 if m.always():
815 if m.always():
816 pats = []
816 pats = []
817 return m, pats
817 return m, pats
818
818
819 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
819 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
820 badfn=None):
820 badfn=None):
821 '''Return a matcher that will warn about bad matches.'''
821 '''Return a matcher that will warn about bad matches.'''
822 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
822 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
823
823
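A sketch of building a matcher against the working directory (the pattern and opts here are illustrative, not taken from the original change); the returned matcher is a callable predicate:

from mercurial import scmutil

wctx = repo[None]
m = scmutil.match(wctx, pats=['glob:*.py'],
                  opts={'include': [], 'exclude': []})
if m('setup.py'):                           # matchers are callable predicates
    repo.ui.status('setup.py matches\n')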
824 def matchall(repo):
824 def matchall(repo):
825 '''Return a matcher that will efficiently match everything.'''
825 '''Return a matcher that will efficiently match everything.'''
826 return matchmod.always()
826 return matchmod.always()
827
827
828 def matchfiles(repo, files, badfn=None):
828 def matchfiles(repo, files, badfn=None):
829 '''Return a matcher that will efficiently match exactly these files.'''
829 '''Return a matcher that will efficiently match exactly these files.'''
830 return matchmod.exact(files, badfn=badfn)
830 return matchmod.exact(files, badfn=badfn)
831
831
832 def parsefollowlinespattern(repo, rev, pat, msg):
832 def parsefollowlinespattern(repo, rev, pat, msg):
833 """Return a file name from `pat` pattern suitable for usage in followlines
833 """Return a file name from `pat` pattern suitable for usage in followlines
834 logic.
834 logic.
835 """
835 """
836 if not matchmod.patkind(pat):
836 if not matchmod.patkind(pat):
837 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
837 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
838 else:
838 else:
839 ctx = repo[rev]
839 ctx = repo[rev]
840 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
840 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
841 files = [f for f in ctx if m(f)]
841 files = [f for f in ctx if m(f)]
842 if len(files) != 1:
842 if len(files) != 1:
843 raise error.ParseError(msg)
843 raise error.ParseError(msg)
844 return files[0]
844 return files[0]
845
845
846 def getorigvfs(ui, repo):
846 def getorigvfs(ui, repo):
847 """return a vfs suitable to save 'orig' file
847 """return a vfs suitable to save 'orig' file
848
848
849 return None if no special directory is configured"""
849 return None if no special directory is configured"""
850 origbackuppath = ui.config('ui', 'origbackuppath')
850 origbackuppath = ui.config('ui', 'origbackuppath')
851 if not origbackuppath:
851 if not origbackuppath:
852 return None
852 return None
853 return vfs.vfs(repo.wvfs.join(origbackuppath))
853 return vfs.vfs(repo.wvfs.join(origbackuppath))
854
854
855 def backuppath(ui, repo, filepath):
855 def backuppath(ui, repo, filepath):
856 '''customize where working copy backup files (.orig files) are created
856 '''customize where working copy backup files (.orig files) are created
857
857
858 Fetch user defined path from config file: [ui] origbackuppath = <path>
858 Fetch user defined path from config file: [ui] origbackuppath = <path>
859 Fall back to default (filepath with .orig suffix) if not specified
859 Fall back to default (filepath with .orig suffix) if not specified
860
860
861 filepath is repo-relative
861 filepath is repo-relative
862
862
863 Returns an absolute path
863 Returns an absolute path
864 '''
864 '''
865 origvfs = getorigvfs(ui, repo)
865 origvfs = getorigvfs(ui, repo)
866 if origvfs is None:
866 if origvfs is None:
867 return repo.wjoin(filepath + ".orig")
867 return repo.wjoin(filepath + ".orig")
868
868
869 origbackupdir = origvfs.dirname(filepath)
869 origbackupdir = origvfs.dirname(filepath)
870 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
870 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
871 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
871 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
872
872
873 # Remove any files that conflict with the backup file's path
873 # Remove any files that conflict with the backup file's path
874 for f in reversed(list(util.finddirs(filepath))):
874 for f in reversed(list(util.finddirs(filepath))):
875 if origvfs.isfileorlink(f):
875 if origvfs.isfileorlink(f):
876 ui.note(_('removing conflicting file: %s\n')
876 ui.note(_('removing conflicting file: %s\n')
877 % origvfs.join(f))
877 % origvfs.join(f))
878 origvfs.unlink(f)
878 origvfs.unlink(f)
879 break
879 break
880
880
881 origvfs.makedirs(origbackupdir)
881 origvfs.makedirs(origbackupdir)
882
882
883 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
883 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
884 ui.note(_('removing conflicting directory: %s\n')
884 ui.note(_('removing conflicting directory: %s\n')
885 % origvfs.join(filepath))
885 % origvfs.join(filepath))
886 origvfs.rmtree(filepath, forcibly=True)
886 origvfs.rmtree(filepath, forcibly=True)
887
887
888 return origvfs.join(filepath)
888 return origvfs.join(filepath)
889
889
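Sketch (the file name is a placeholder): resolving where the .orig backup of a working-copy file would be written, honouring ui.origbackuppath when it is configured:

from mercurial import scmutil

origpath = scmutil.backuppath(repo.ui, repo, 'src/module.py')   # repo-relative input
repo.ui.debug('backup will be written to %s\n' % origpath)      # absolute output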
890 class _containsnode(object):
890 class _containsnode(object):
891 """proxy __contains__(node) to container.__contains__ which accepts revs"""
891 """proxy __contains__(node) to container.__contains__ which accepts revs"""
892
892
893 def __init__(self, repo, revcontainer):
893 def __init__(self, repo, revcontainer):
894 self._torev = repo.changelog.rev
894 self._torev = repo.changelog.rev
895 self._revcontains = revcontainer.__contains__
895 self._revcontains = revcontainer.__contains__
896
896
897 def __contains__(self, node):
897 def __contains__(self, node):
898 return self._revcontains(self._torev(node))
898 return self._revcontains(self._torev(node))
899
899
900 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
900 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
901 fixphase=False, targetphase=None, backup=True):
901 fixphase=False, targetphase=None, backup=True):
902 """do common cleanups when old nodes are replaced by new nodes
902 """do common cleanups when old nodes are replaced by new nodes
903
903
904 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
904 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
905 (we might also want to move working directory parent in the future)
905 (we might also want to move working directory parent in the future)
906
906
907 By default, bookmark moves are calculated automatically from 'replacements',
907 By default, bookmark moves are calculated automatically from 'replacements',
908 but 'moves' can be used to override that. Also, 'moves' may include
908 but 'moves' can be used to override that. Also, 'moves' may include
909 additional bookmark moves that should not have associated obsmarkers.
909 additional bookmark moves that should not have associated obsmarkers.
910
910
911 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
911 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
912 have replacements. operation is a string, like "rebase".
912 have replacements. operation is a string, like "rebase".
913
913
914 metadata is a dictionary containing metadata to be stored in obsmarkers if
914 metadata is a dictionary containing metadata to be stored in obsmarkers if
915 obsolescence is enabled.
915 obsolescence is enabled.
916 """
916 """
917 assert fixphase or targetphase is None
917 assert fixphase or targetphase is None
918 if not replacements and not moves:
918 if not replacements and not moves:
919 return
919 return
920
920
921 # translate mapping's other forms
921 # translate mapping's other forms
922 if not util.safehasattr(replacements, 'items'):
922 if not util.safehasattr(replacements, 'items'):
923 replacements = {(n,): () for n in replacements}
923 replacements = {(n,): () for n in replacements}
924 else:
924 else:
925 # upgrading non-tuple "source" keys to tuples for BC
925 # upgrading non-tuple "source" keys to tuples for BC
926 repls = {}
926 repls = {}
927 for key, value in replacements.items():
927 for key, value in replacements.items():
928 if not isinstance(key, tuple):
928 if not isinstance(key, tuple):
929 key = (key,)
929 key = (key,)
930 repls[key] = value
930 repls[key] = value
931 replacements = repls
931 replacements = repls
932
932
933 # Unfiltered repo is needed since nodes in replacements might be hidden.
933 # Unfiltered repo is needed since nodes in replacements might be hidden.
934 unfi = repo.unfiltered()
934 unfi = repo.unfiltered()
935
935
936 # Calculate bookmark movements
936 # Calculate bookmark movements
937 if moves is None:
937 if moves is None:
938 moves = {}
938 moves = {}
939 for oldnodes, newnodes in replacements.items():
939 for oldnodes, newnodes in replacements.items():
940 for oldnode in oldnodes:
940 for oldnode in oldnodes:
941 if oldnode in moves:
941 if oldnode in moves:
942 continue
942 continue
943 if len(newnodes) > 1:
943 if len(newnodes) > 1:
944 # usually a split, take the one with biggest rev number
944 # usually a split, take the one with biggest rev number
945 newnode = next(unfi.set('max(%ln)', newnodes)).node()
945 newnode = next(unfi.set('max(%ln)', newnodes)).node()
946 elif len(newnodes) == 0:
946 elif len(newnodes) == 0:
947 # move bookmark backwards
947 # move bookmark backwards
948 allreplaced = []
948 allreplaced = []
949 for rep in replacements:
949 for rep in replacements:
950 allreplaced.extend(rep)
950 allreplaced.extend(rep)
951 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
951 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
952 allreplaced))
952 allreplaced))
953 if roots:
953 if roots:
954 newnode = roots[0].node()
954 newnode = roots[0].node()
955 else:
955 else:
956 newnode = nullid
956 newnode = nullid
957 else:
957 else:
958 newnode = newnodes[0]
958 newnode = newnodes[0]
959 moves[oldnode] = newnode
959 moves[oldnode] = newnode
960
960
961 allnewnodes = [n for ns in replacements.values() for n in ns]
961 allnewnodes = [n for ns in replacements.values() for n in ns]
962 toretract = {}
962 toretract = {}
963 toadvance = {}
963 toadvance = {}
964 if fixphase:
964 if fixphase:
965 precursors = {}
965 precursors = {}
966 for oldnodes, newnodes in replacements.items():
966 for oldnodes, newnodes in replacements.items():
967 for oldnode in oldnodes:
967 for oldnode in oldnodes:
968 for newnode in newnodes:
968 for newnode in newnodes:
969 precursors.setdefault(newnode, []).append(oldnode)
969 precursors.setdefault(newnode, []).append(oldnode)
970
970
971 allnewnodes.sort(key=lambda n: unfi[n].rev())
971 allnewnodes.sort(key=lambda n: unfi[n].rev())
972 newphases = {}
972 newphases = {}
973 def phase(ctx):
973 def phase(ctx):
974 return newphases.get(ctx.node(), ctx.phase())
974 return newphases.get(ctx.node(), ctx.phase())
975 for newnode in allnewnodes:
975 for newnode in allnewnodes:
976 ctx = unfi[newnode]
976 ctx = unfi[newnode]
977 parentphase = max(phase(p) for p in ctx.parents())
977 parentphase = max(phase(p) for p in ctx.parents())
978 if targetphase is None:
978 if targetphase is None:
979 oldphase = max(unfi[oldnode].phase()
979 oldphase = max(unfi[oldnode].phase()
980 for oldnode in precursors[newnode])
980 for oldnode in precursors[newnode])
981 newphase = max(oldphase, parentphase)
981 newphase = max(oldphase, parentphase)
982 else:
982 else:
983 newphase = max(targetphase, parentphase)
983 newphase = max(targetphase, parentphase)
984 newphases[newnode] = newphase
984 newphases[newnode] = newphase
985 if newphase > ctx.phase():
985 if newphase > ctx.phase():
986 toretract.setdefault(newphase, []).append(newnode)
986 toretract.setdefault(newphase, []).append(newnode)
987 elif newphase < ctx.phase():
987 elif newphase < ctx.phase():
988 toadvance.setdefault(newphase, []).append(newnode)
988 toadvance.setdefault(newphase, []).append(newnode)
989
989
990 with repo.transaction('cleanup') as tr:
990 with repo.transaction('cleanup') as tr:
991 # Move bookmarks
991 # Move bookmarks
992 bmarks = repo._bookmarks
992 bmarks = repo._bookmarks
993 bmarkchanges = []
993 bmarkchanges = []
994 for oldnode, newnode in moves.items():
994 for oldnode, newnode in moves.items():
995 oldbmarks = repo.nodebookmarks(oldnode)
995 oldbmarks = repo.nodebookmarks(oldnode)
996 if not oldbmarks:
996 if not oldbmarks:
997 continue
997 continue
998 from . import bookmarks # avoid import cycle
998 from . import bookmarks # avoid import cycle
999 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
999 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1000 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1000 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1001 hex(oldnode), hex(newnode)))
1001 hex(oldnode), hex(newnode)))
1002 # Delete divergent bookmarks being parents of related newnodes
1002 # Delete divergent bookmarks being parents of related newnodes
1003 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1003 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1004 allnewnodes, newnode, oldnode)
1004 allnewnodes, newnode, oldnode)
1005 deletenodes = _containsnode(repo, deleterevs)
1005 deletenodes = _containsnode(repo, deleterevs)
1006 for name in oldbmarks:
1006 for name in oldbmarks:
1007 bmarkchanges.append((name, newnode))
1007 bmarkchanges.append((name, newnode))
1008 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1008 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1009 bmarkchanges.append((b, None))
1009 bmarkchanges.append((b, None))
1010
1010
1011 if bmarkchanges:
1011 if bmarkchanges:
1012 bmarks.applychanges(repo, tr, bmarkchanges)
1012 bmarks.applychanges(repo, tr, bmarkchanges)
1013
1013
1014 for phase, nodes in toretract.items():
1014 for phase, nodes in toretract.items():
1015 phases.retractboundary(repo, tr, phase, nodes)
1015 phases.retractboundary(repo, tr, phase, nodes)
1016 for phase, nodes in toadvance.items():
1016 for phase, nodes in toadvance.items():
1017 phases.advanceboundary(repo, tr, phase, nodes)
1017 phases.advanceboundary(repo, tr, phase, nodes)
1018
1018
1019 mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
1019 # Obsolete or strip nodes
1020 # Obsolete or strip nodes
1020 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1021 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1021 # If a node is already obsoleted, and we want to obsolete it
1022 # If a node is already obsoleted, and we want to obsolete it
1022 # without a successor, skip that obssolete request since it's
1023 # without a successor, skip that obssolete request since it's
1023 # unnecessary. That's the "if s or not isobs(n)" check below.
1024 # unnecessary. That's the "if s or not isobs(n)" check below.
1024 # Also sort the node in topology order, that might be useful for
1025 # Also sort the node in topology order, that might be useful for
1025 # some obsstore logic.
1026 # some obsstore logic.
1026 # NOTE: the sorting might belong to createmarkers.
1027 # NOTE: the sorting might belong to createmarkers.
1027 torev = unfi.changelog.rev
1028 torev = unfi.changelog.rev
1028 sortfunc = lambda ns: torev(ns[0][0])
1029 sortfunc = lambda ns: torev(ns[0][0])
1029 rels = []
1030 rels = []
1030 for ns, s in sorted(replacements.items(), key=sortfunc):
1031 for ns, s in sorted(replacements.items(), key=sortfunc):
1031 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1032 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1032 rels.append(rel)
1033 rels.append(rel)
1033 if rels:
1034 if rels:
1034 obsolete.createmarkers(repo, rels, operation=operation,
1035 obsolete.createmarkers(repo, rels, operation=operation,
1035 metadata=metadata)
1036 metadata=metadata)
1037 elif phases.supportinternal(repo) and mayusearchived:
1038 # this assumes we do not have "unstable" nodes above the cleaned ones
1039 allreplaced = set()
1040 for ns in replacements.keys():
1041 allreplaced.update(ns)
1042 if backup:
1043 from . import repair # avoid import cycle
1044 node = min(allreplaced, key=repo.changelog.rev)
1045 repair.backupbundle(repo, allreplaced, allreplaced, node,
1046 operation)
1047 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1036 else:
1048 else:
1037 from . import repair # avoid import cycle
1049 from . import repair # avoid import cycle
1038 tostrip = list(n for ns in replacements for n in ns)
1050 tostrip = list(n for ns in replacements for n in ns)
1039 if tostrip:
1051 if tostrip:
1040 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1052 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1041 backup=backup)
1053 backup=backup)
1042
1054
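A sketch of how a history-rewriting command would typically call cleanupnodes() (the node variables are placeholders); depending on repository capabilities, the old nodes end up obsoleted, retracted to the archived phase, or stripped:

from mercurial import scmutil

# every old node maps to its successors; an empty list means "pruned"
replacements = {oldnode: [newnode], prunednode: []}
scmutil.cleanupnodes(repo, replacements, operation='amend', fixphase=True)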
1043 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1055 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1044 if opts is None:
1056 if opts is None:
1045 opts = {}
1057 opts = {}
1046 m = matcher
1058 m = matcher
1047 dry_run = opts.get('dry_run')
1059 dry_run = opts.get('dry_run')
1048 try:
1060 try:
1049 similarity = float(opts.get('similarity') or 0)
1061 similarity = float(opts.get('similarity') or 0)
1050 except ValueError:
1062 except ValueError:
1051 raise error.Abort(_('similarity must be a number'))
1063 raise error.Abort(_('similarity must be a number'))
1052 if similarity < 0 or similarity > 100:
1064 if similarity < 0 or similarity > 100:
1053 raise error.Abort(_('similarity must be between 0 and 100'))
1065 raise error.Abort(_('similarity must be between 0 and 100'))
1054 similarity /= 100.0
1066 similarity /= 100.0
1055
1067
1056 ret = 0
1068 ret = 0
1057
1069
1058 wctx = repo[None]
1070 wctx = repo[None]
1059 for subpath in sorted(wctx.substate):
1071 for subpath in sorted(wctx.substate):
1060 submatch = matchmod.subdirmatcher(subpath, m)
1072 submatch = matchmod.subdirmatcher(subpath, m)
1061 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1073 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1062 sub = wctx.sub(subpath)
1074 sub = wctx.sub(subpath)
1063 subprefix = repo.wvfs.reljoin(prefix, subpath)
1075 subprefix = repo.wvfs.reljoin(prefix, subpath)
1064 subuipathfn = subdiruipathfn(subpath, uipathfn)
1076 subuipathfn = subdiruipathfn(subpath, uipathfn)
1065 try:
1077 try:
1066 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1078 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1067 ret = 1
1079 ret = 1
1068 except error.LookupError:
1080 except error.LookupError:
1069 repo.ui.status(_("skipping missing subrepository: %s\n")
1081 repo.ui.status(_("skipping missing subrepository: %s\n")
1070 % uipathfn(subpath))
1082 % uipathfn(subpath))
1071
1083
1072 rejected = []
1084 rejected = []
1073 def badfn(f, msg):
1085 def badfn(f, msg):
1074 if f in m.files():
1086 if f in m.files():
1075 m.bad(f, msg)
1087 m.bad(f, msg)
1076 rejected.append(f)
1088 rejected.append(f)
1077
1089
1078 badmatch = matchmod.badmatch(m, badfn)
1090 badmatch = matchmod.badmatch(m, badfn)
1079 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1091 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1080 badmatch)
1092 badmatch)
1081
1093
1082 unknownset = set(unknown + forgotten)
1094 unknownset = set(unknown + forgotten)
1083 toprint = unknownset.copy()
1095 toprint = unknownset.copy()
1084 toprint.update(deleted)
1096 toprint.update(deleted)
1085 for abs in sorted(toprint):
1097 for abs in sorted(toprint):
1086 if repo.ui.verbose or not m.exact(abs):
1098 if repo.ui.verbose or not m.exact(abs):
1087 if abs in unknownset:
1099 if abs in unknownset:
1088 status = _('adding %s\n') % uipathfn(abs)
1100 status = _('adding %s\n') % uipathfn(abs)
1089 label = 'ui.addremove.added'
1101 label = 'ui.addremove.added'
1090 else:
1102 else:
1091 status = _('removing %s\n') % uipathfn(abs)
1103 status = _('removing %s\n') % uipathfn(abs)
1092 label = 'ui.addremove.removed'
1104 label = 'ui.addremove.removed'
1093 repo.ui.status(status, label=label)
1105 repo.ui.status(status, label=label)
1094
1106
1095 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1107 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1096 similarity, uipathfn)
1108 similarity, uipathfn)
1097
1109
1098 if not dry_run:
1110 if not dry_run:
1099 _markchanges(repo, unknown + forgotten, deleted, renames)
1111 _markchanges(repo, unknown + forgotten, deleted, renames)
1100
1112
1101 for f in rejected:
1113 for f in rejected:
1102 if f in m.files():
1114 if f in m.files():
1103 return 1
1115 return 1
1104 return ret
1116 return ret
1105
1117
1106 def marktouched(repo, files, similarity=0.0):
1118 def marktouched(repo, files, similarity=0.0):
1107 '''Assert that files have somehow been operated upon. files are relative to
1119 '''Assert that files have somehow been operated upon. files are relative to
1108 the repo root.'''
1120 the repo root.'''
1109 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1121 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1110 rejected = []
1122 rejected = []
1111
1123
1112 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1124 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1113
1125
1114 if repo.ui.verbose:
1126 if repo.ui.verbose:
1115 unknownset = set(unknown + forgotten)
1127 unknownset = set(unknown + forgotten)
1116 toprint = unknownset.copy()
1128 toprint = unknownset.copy()
1117 toprint.update(deleted)
1129 toprint.update(deleted)
1118 for abs in sorted(toprint):
1130 for abs in sorted(toprint):
1119 if abs in unknownset:
1131 if abs in unknownset:
1120 status = _('adding %s\n') % abs
1132 status = _('adding %s\n') % abs
1121 else:
1133 else:
1122 status = _('removing %s\n') % abs
1134 status = _('removing %s\n') % abs
1123 repo.ui.status(status)
1135 repo.ui.status(status)
1124
1136
1125 # TODO: We should probably have the caller pass in uipathfn and apply it to
1137 # TODO: We should probably have the caller pass in uipathfn and apply it to
1126 # the messages above too. legacyrelativevalue=True is consistent with how
1138 # the messages above too. legacyrelativevalue=True is consistent with how
1127 # it used to work.
1139 # it used to work.
1128 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1140 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1129 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1141 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1130 similarity, uipathfn)
1142 similarity, uipathfn)
1131
1143
1132 _markchanges(repo, unknown + forgotten, deleted, renames)
1144 _markchanges(repo, unknown + forgotten, deleted, renames)
1133
1145
1134 for f in rejected:
1146 for f in rejected:
1135 if f in m.files():
1147 if f in m.files():
1136 return 1
1148 return 1
1137 return 0
1149 return 0
1138
1150
1139 def _interestingfiles(repo, matcher):
1151 def _interestingfiles(repo, matcher):
1140 '''Walk dirstate with matcher, looking for files that addremove would care
1152 '''Walk dirstate with matcher, looking for files that addremove would care
1141 about.
1153 about.
1142
1154
1143 This is different from dirstate.status because it doesn't care about
1155 This is different from dirstate.status because it doesn't care about
1144 whether files are modified or clean.'''
1156 whether files are modified or clean.'''
1145 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1157 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1146 audit_path = pathutil.pathauditor(repo.root, cached=True)
1158 audit_path = pathutil.pathauditor(repo.root, cached=True)
1147
1159
1148 ctx = repo[None]
1160 ctx = repo[None]
1149 dirstate = repo.dirstate
1161 dirstate = repo.dirstate
1150 matcher = repo.narrowmatch(matcher, includeexact=True)
1162 matcher = repo.narrowmatch(matcher, includeexact=True)
1151 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1163 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1152 unknown=True, ignored=False, full=False)
1164 unknown=True, ignored=False, full=False)
1153 for abs, st in walkresults.iteritems():
1165 for abs, st in walkresults.iteritems():
1154 dstate = dirstate[abs]
1166 dstate = dirstate[abs]
1155 if dstate == '?' and audit_path.check(abs):
1167 if dstate == '?' and audit_path.check(abs):
1156 unknown.append(abs)
1168 unknown.append(abs)
1157 elif dstate != 'r' and not st:
1169 elif dstate != 'r' and not st:
1158 deleted.append(abs)
1170 deleted.append(abs)
1159 elif dstate == 'r' and st:
1171 elif dstate == 'r' and st:
1160 forgotten.append(abs)
1172 forgotten.append(abs)
1161 # for finding renames
1173 # for finding renames
1162 elif dstate == 'r' and not st:
1174 elif dstate == 'r' and not st:
1163 removed.append(abs)
1175 removed.append(abs)
1164 elif dstate == 'a':
1176 elif dstate == 'a':
1165 added.append(abs)
1177 added.append(abs)
1166
1178
1167 return added, unknown, deleted, removed, forgotten
1179 return added, unknown, deleted, removed, forgotten
1168
1180
1169 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1181 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1170 '''Find renames from removed files to added ones.'''
1182 '''Find renames from removed files to added ones.'''
1171 renames = {}
1183 renames = {}
1172 if similarity > 0:
1184 if similarity > 0:
1173 for old, new, score in similar.findrenames(repo, added, removed,
1185 for old, new, score in similar.findrenames(repo, added, removed,
1174 similarity):
1186 similarity):
1175 if (repo.ui.verbose or not matcher.exact(old)
1187 if (repo.ui.verbose or not matcher.exact(old)
1176 or not matcher.exact(new)):
1188 or not matcher.exact(new)):
1177 repo.ui.status(_('recording removal of %s as rename to %s '
1189 repo.ui.status(_('recording removal of %s as rename to %s '
1178 '(%d%% similar)\n') %
1190 '(%d%% similar)\n') %
1179 (uipathfn(old), uipathfn(new),
1191 (uipathfn(old), uipathfn(new),
1180 score * 100))
1192 score * 100))
1181 renames[new] = old
1193 renames[new] = old
1182 return renames
1194 return renames
1183
1195
1184 def _markchanges(repo, unknown, deleted, renames):
1196 def _markchanges(repo, unknown, deleted, renames):
1185 '''Marks the files in unknown as added, the files in deleted as removed,
1197 '''Marks the files in unknown as added, the files in deleted as removed,
1186 and the files in renames as copied.'''
1198 and the files in renames as copied.'''
1187 wctx = repo[None]
1199 wctx = repo[None]
1188 with repo.wlock():
1200 with repo.wlock():
1189 wctx.forget(deleted)
1201 wctx.forget(deleted)
1190 wctx.add(unknown)
1202 wctx.add(unknown)
1191 for new, old in renames.iteritems():
1203 for new, old in renames.iteritems():
1192 wctx.copy(old, new)
1204 wctx.copy(old, new)
1193
1205
1194 def getrenamedfn(repo, endrev=None):
1206 def getrenamedfn(repo, endrev=None):
1195 rcache = {}
1207 rcache = {}
1196 if endrev is None:
1208 if endrev is None:
1197 endrev = len(repo)
1209 endrev = len(repo)
1198
1210
1199 def getrenamed(fn, rev):
1211 def getrenamed(fn, rev):
1200 '''looks up all renames for a file (up to endrev) the first
1212 '''looks up all renames for a file (up to endrev) the first
1201 time the file is queried. It indexes on the changerev and only
1213 time the file is queried. It indexes on the changerev and only
1202 parses the manifest if linkrev != changerev.
1214 parses the manifest if linkrev != changerev.
1203 Returns rename info for fn at changerev rev.'''
1215 Returns rename info for fn at changerev rev.'''
1204 if fn not in rcache:
1216 if fn not in rcache:
1205 rcache[fn] = {}
1217 rcache[fn] = {}
1206 fl = repo.file(fn)
1218 fl = repo.file(fn)
1207 for i in fl:
1219 for i in fl:
1208 lr = fl.linkrev(i)
1220 lr = fl.linkrev(i)
1209 renamed = fl.renamed(fl.node(i))
1221 renamed = fl.renamed(fl.node(i))
1210 rcache[fn][lr] = renamed and renamed[0]
1222 rcache[fn][lr] = renamed and renamed[0]
1211 if lr >= endrev:
1223 if lr >= endrev:
1212 break
1224 break
1213 if rev in rcache[fn]:
1225 if rev in rcache[fn]:
1214 return rcache[fn][rev]
1226 return rcache[fn][rev]
1215
1227
1216 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1228 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1217 # filectx logic.
1229 # filectx logic.
1218 try:
1230 try:
1219 return repo[rev][fn].copysource()
1231 return repo[rev][fn].copysource()
1220 except error.LookupError:
1232 except error.LookupError:
1221 return None
1233 return None
1222
1234
1223 return getrenamed
1235 return getrenamed
1224
1236
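Sketch of the lookup pattern used by log/template code (the file name and `rev` are placeholders): build the cached function once, then query it per file and revision:

from mercurial import scmutil

getrenamed = scmutil.getrenamedfn(repo)
source = getrenamed('docs/readme.txt', rev)   # copy source of the file at `rev`, or None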
1225 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1237 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1226 """Update the dirstate to reflect the intent of copying src to dst. For
1238 """Update the dirstate to reflect the intent of copying src to dst. For
1227 different reasons it might not end with dst being marked as copied from src.
1239 different reasons it might not end with dst being marked as copied from src.
1228 """
1240 """
1229 origsrc = repo.dirstate.copied(src) or src
1241 origsrc = repo.dirstate.copied(src) or src
1230 if dst == origsrc: # copying back a copy?
1242 if dst == origsrc: # copying back a copy?
1231 if repo.dirstate[dst] not in 'mn' and not dryrun:
1243 if repo.dirstate[dst] not in 'mn' and not dryrun:
1232 repo.dirstate.normallookup(dst)
1244 repo.dirstate.normallookup(dst)
1233 else:
1245 else:
1234 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1246 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1235 if not ui.quiet:
1247 if not ui.quiet:
1236 ui.warn(_("%s has not been committed yet, so no copy "
1248 ui.warn(_("%s has not been committed yet, so no copy "
1237 "data will be stored for %s.\n")
1249 "data will be stored for %s.\n")
1238 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1250 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1239 if repo.dirstate[dst] in '?r' and not dryrun:
1251 if repo.dirstate[dst] in '?r' and not dryrun:
1240 wctx.add([dst])
1252 wctx.add([dst])
1241 elif not dryrun:
1253 elif not dryrun:
1242 wctx.copy(origsrc, dst)
1254 wctx.copy(origsrc, dst)
1243
1255
1244 def writerequires(opener, requirements):
1256 def writerequires(opener, requirements):
1245 with opener('requires', 'w', atomictemp=True) as fp:
1257 with opener('requires', 'w', atomictemp=True) as fp:
1246 for r in sorted(requirements):
1258 for r in sorted(requirements):
1247 fp.write("%s\n" % r)
1259 fp.write("%s\n" % r)
1248
1260
1249 class filecachesubentry(object):
1261 class filecachesubentry(object):
1250 def __init__(self, path, stat):
1262 def __init__(self, path, stat):
1251 self.path = path
1263 self.path = path
1252 self.cachestat = None
1264 self.cachestat = None
1253 self._cacheable = None
1265 self._cacheable = None
1254
1266
1255 if stat:
1267 if stat:
1256 self.cachestat = filecachesubentry.stat(self.path)
1268 self.cachestat = filecachesubentry.stat(self.path)
1257
1269
1258 if self.cachestat:
1270 if self.cachestat:
1259 self._cacheable = self.cachestat.cacheable()
1271 self._cacheable = self.cachestat.cacheable()
1260 else:
1272 else:
1261 # None means we don't know yet
1273 # None means we don't know yet
1262 self._cacheable = None
1274 self._cacheable = None
1263
1275
1264 def refresh(self):
1276 def refresh(self):
1265 if self.cacheable():
1277 if self.cacheable():
1266 self.cachestat = filecachesubentry.stat(self.path)
1278 self.cachestat = filecachesubentry.stat(self.path)
1267
1279
1268 def cacheable(self):
1280 def cacheable(self):
1269 if self._cacheable is not None:
1281 if self._cacheable is not None:
1270 return self._cacheable
1282 return self._cacheable
1271
1283
1272 # we don't know yet, assume it is for now
1284 # we don't know yet, assume it is for now
1273 return True
1285 return True
1274
1286
1275 def changed(self):
1287 def changed(self):
1276 # no point in going further if we can't cache it
1288 # no point in going further if we can't cache it
1277 if not self.cacheable():
1289 if not self.cacheable():
1278 return True
1290 return True
1279
1291
1280 newstat = filecachesubentry.stat(self.path)
1292 newstat = filecachesubentry.stat(self.path)
1281
1293
1282 # we may not know if it's cacheable yet, check again now
1294 # we may not know if it's cacheable yet, check again now
1283 if newstat and self._cacheable is None:
1295 if newstat and self._cacheable is None:
1284 self._cacheable = newstat.cacheable()
1296 self._cacheable = newstat.cacheable()
1285
1297
1286 # check again
1298 # check again
1287 if not self._cacheable:
1299 if not self._cacheable:
1288 return True
1300 return True
1289
1301
1290 if self.cachestat != newstat:
1302 if self.cachestat != newstat:
1291 self.cachestat = newstat
1303 self.cachestat = newstat
1292 return True
1304 return True
1293 else:
1305 else:
1294 return False
1306 return False
1295
1307
1296 @staticmethod
1308 @staticmethod
1297 def stat(path):
1309 def stat(path):
1298 try:
1310 try:
1299 return util.cachestat(path)
1311 return util.cachestat(path)
1300 except OSError as e:
1312 except OSError as e:
1301 if e.errno != errno.ENOENT:
1313 if e.errno != errno.ENOENT:
1302 raise
1314 raise
1303
1315
1304 class filecacheentry(object):
1316 class filecacheentry(object):
1305 def __init__(self, paths, stat=True):
1317 def __init__(self, paths, stat=True):
1306 self._entries = []
1318 self._entries = []
1307 for path in paths:
1319 for path in paths:
1308 self._entries.append(filecachesubentry(path, stat))
1320 self._entries.append(filecachesubentry(path, stat))
1309
1321
1310 def changed(self):
1322 def changed(self):
1311 '''true if any entry has changed'''
1323 '''true if any entry has changed'''
1312 for entry in self._entries:
1324 for entry in self._entries:
1313 if entry.changed():
1325 if entry.changed():
1314 return True
1326 return True
1315 return False
1327 return False
1316
1328
1317 def refresh(self):
1329 def refresh(self):
1318 for entry in self._entries:
1330 for entry in self._entries:
1319 entry.refresh()
1331 entry.refresh()
1320
1332
1321 class filecache(object):
1333 class filecache(object):
1322 """A property like decorator that tracks files under .hg/ for updates.
1334 """A property like decorator that tracks files under .hg/ for updates.
1323
1335
1324 On first access, the files defined as arguments are stat()ed and the
1336 On first access, the files defined as arguments are stat()ed and the
1325 results cached. The decorated function is called. The results are stashed
1337 results cached. The decorated function is called. The results are stashed
1326 away in a ``_filecache`` dict on the object whose method is decorated.
1338 away in a ``_filecache`` dict on the object whose method is decorated.
1327
1339
1328 On subsequent access, the cached result is used as it is set to the
1340 On subsequent access, the cached result is used as it is set to the
1329 instance dictionary.
1341 instance dictionary.
1330
1342
1331 On external property set/delete operations, the caller must update the
1343 On external property set/delete operations, the caller must update the
1332 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1344 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1333 instead of directly setting <attr>.
1345 instead of directly setting <attr>.
1334
1346
1335 When using the property API, the cached data is always used if available.
1347 When using the property API, the cached data is always used if available.
1336 No stat() is performed to check if the file has changed.
1348 No stat() is performed to check if the file has changed.
1337
1349
1338 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1350 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1339 can populate an entry before the property's getter is called. In this case,
1351 can populate an entry before the property's getter is called. In this case,
1340 entries in ``_filecache`` will be used during property operations,
1352 entries in ``_filecache`` will be used during property operations,
1341 if available. If the underlying file changes, it is up to external callers
1353 if available. If the underlying file changes, it is up to external callers
1342 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1354 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1343 method result as well as possibly calling ``del obj._filecache[attr]`` to
1355 method result as well as possibly calling ``del obj._filecache[attr]`` to
1344 remove the ``filecacheentry``.
1356 remove the ``filecacheentry``.
1345 """
1357 """
1346
1358
1347 def __init__(self, *paths):
1359 def __init__(self, *paths):
1348 self.paths = paths
1360 self.paths = paths
1349
1361
1350 def join(self, obj, fname):
1362 def join(self, obj, fname):
1351 """Used to compute the runtime path of a cached file.
1363 """Used to compute the runtime path of a cached file.
1352
1364
1353 Users should subclass filecache and provide their own version of this
1365 Users should subclass filecache and provide their own version of this
1354 function to call the appropriate join function on 'obj' (an instance
1366 function to call the appropriate join function on 'obj' (an instance
1355 of the class that its member function was decorated).
1367 of the class that its member function was decorated).
1356 """
1368 """
1357 raise NotImplementedError
1369 raise NotImplementedError
1358
1370
1359 def __call__(self, func):
1371 def __call__(self, func):
1360 self.func = func
1372 self.func = func
1361 self.sname = func.__name__
1373 self.sname = func.__name__
1362 self.name = pycompat.sysbytes(self.sname)
1374 self.name = pycompat.sysbytes(self.sname)
1363 return self
1375 return self
1364
1376
1365 def __get__(self, obj, type=None):
1377 def __get__(self, obj, type=None):
1366 # if accessed on the class, return the descriptor itself.
1378 # if accessed on the class, return the descriptor itself.
1367 if obj is None:
1379 if obj is None:
1368 return self
1380 return self
1369
1381
1370 assert self.sname not in obj.__dict__
1382 assert self.sname not in obj.__dict__
1371
1383
1372 entry = obj._filecache.get(self.name)
1384 entry = obj._filecache.get(self.name)
1373
1385
1374 if entry:
1386 if entry:
1375 if entry.changed():
1387 if entry.changed():
1376 entry.obj = self.func(obj)
1388 entry.obj = self.func(obj)
1377 else:
1389 else:
1378 paths = [self.join(obj, path) for path in self.paths]
1390 paths = [self.join(obj, path) for path in self.paths]
1379
1391
1380 # We stat -before- creating the object so our cache doesn't lie if
1392 # We stat -before- creating the object so our cache doesn't lie if
1381 # a writer modified between the time we read and stat
1393 # a writer modified between the time we read and stat
1382 entry = filecacheentry(paths, True)
1394 entry = filecacheentry(paths, True)
1383 entry.obj = self.func(obj)
1395 entry.obj = self.func(obj)
1384
1396
1385 obj._filecache[self.name] = entry
1397 obj._filecache[self.name] = entry
1386
1398
1387 obj.__dict__[self.sname] = entry.obj
1399 obj.__dict__[self.sname] = entry.obj
1388 return entry.obj
1400 return entry.obj
1389
1401
1390 # don't implement __set__(), which would make __dict__ lookup as slow as
1402 # don't implement __set__(), which would make __dict__ lookup as slow as
1391 # function call.
1403 # function call.
1392
1404
1393 def set(self, obj, value):
1405 def set(self, obj, value):
1394 if self.name not in obj._filecache:
1406 if self.name not in obj._filecache:
1395 # we add an entry for the missing value because X in __dict__
1407 # we add an entry for the missing value because X in __dict__
1396 # implies X in _filecache
1408 # implies X in _filecache
1397 paths = [self.join(obj, path) for path in self.paths]
1409 paths = [self.join(obj, path) for path in self.paths]
1398 ce = filecacheentry(paths, False)
1410 ce = filecacheentry(paths, False)
1399 obj._filecache[self.name] = ce
1411 obj._filecache[self.name] = ce
1400 else:
1412 else:
1401 ce = obj._filecache[self.name]
1413 ce = obj._filecache[self.name]
1402
1414
1403 ce.obj = value # update cached copy
1415 ce.obj = value # update cached copy
1404 obj.__dict__[self.sname] = value # update copy returned by obj.x
1416 obj.__dict__[self.sname] = value # update copy returned by obj.x
1405
1417
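A sketch of the subclassing contract described in the docstring above; localrepository does essentially this for files under .hg/ (the cached value computed here is purely illustrative):

from mercurial import scmutil

class repofilecache(scmutil.filecache):
    def join(self, obj, fname):
        # resolve tracked file names against the object's own vfs
        return obj.vfs.join(fname)

class cachedthing(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}

    @repofilecache('bookmarks')
    def bookmarks(self):
        # recomputed only when .hg/bookmarks changes on disk
        return self.vfs.tryread('bookmarks').splitlines()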
1406 def extdatasource(repo, source):
1418 def extdatasource(repo, source):
1407 """Gather a map of rev -> value dict from the specified source
1419 """Gather a map of rev -> value dict from the specified source
1408
1420
1409 A source spec is treated as a URL, with a special case shell: type
1421 A source spec is treated as a URL, with a special case shell: type
1410 for parsing the output from a shell command.
1422 for parsing the output from a shell command.
1411
1423
1412 The data is parsed as a series of newline-separated records where
1424 The data is parsed as a series of newline-separated records where
1413 each record is a revision specifier optionally followed by a space
1425 each record is a revision specifier optionally followed by a space
1414 and a freeform string value. If the revision is known locally, it
1426 and a freeform string value. If the revision is known locally, it
1415 is converted to a rev, otherwise the record is skipped.
1427 is converted to a rev, otherwise the record is skipped.
1416
1428
1417 Note that both key and value are treated as UTF-8 and converted to
1429 Note that both key and value are treated as UTF-8 and converted to
1418 the local encoding. This allows uniformity between local and
1430 the local encoding. This allows uniformity between local and
1419 remote data sources.
1431 remote data sources.
1420 """
1432 """
1421
1433
1422 spec = repo.ui.config("extdata", source)
1434 spec = repo.ui.config("extdata", source)
1423 if not spec:
1435 if not spec:
1424 raise error.Abort(_("unknown extdata source '%s'") % source)
1436 raise error.Abort(_("unknown extdata source '%s'") % source)
1425
1437
1426 data = {}
1438 data = {}
1427 src = proc = None
1439 src = proc = None
1428 try:
1440 try:
1429 if spec.startswith("shell:"):
1441 if spec.startswith("shell:"):
1430 # external commands should be run relative to the repo root
1442 # external commands should be run relative to the repo root
1431 cmd = spec[6:]
1443 cmd = spec[6:]
1432 proc = subprocess.Popen(procutil.tonativestr(cmd),
1444 proc = subprocess.Popen(procutil.tonativestr(cmd),
1433 shell=True, bufsize=-1,
1445 shell=True, bufsize=-1,
1434 close_fds=procutil.closefds,
1446 close_fds=procutil.closefds,
1435 stdout=subprocess.PIPE,
1447 stdout=subprocess.PIPE,
1436 cwd=procutil.tonativestr(repo.root))
1448 cwd=procutil.tonativestr(repo.root))
1437 src = proc.stdout
1449 src = proc.stdout
1438 else:
1450 else:
1439 # treat as a URL or file
1451 # treat as a URL or file
1440 src = url.open(repo.ui, spec)
1452 src = url.open(repo.ui, spec)
1441 for l in src:
1453 for l in src:
1442 if " " in l:
1454 if " " in l:
1443 k, v = l.strip().split(" ", 1)
1455 k, v = l.strip().split(" ", 1)
1444 else:
1456 else:
1445 k, v = l.strip(), ""
1457 k, v = l.strip(), ""
1446
1458
1447 k = encoding.tolocal(k)
1459 k = encoding.tolocal(k)
1448 try:
1460 try:
1449 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1461 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1450 except (error.LookupError, error.RepoLookupError):
1462 except (error.LookupError, error.RepoLookupError):
1451 pass # we ignore data for nodes that don't exist locally
1463 pass # we ignore data for nodes that don't exist locally
1452 finally:
1464 finally:
1453 if proc:
1465 if proc:
1454 proc.communicate()
1466 proc.communicate()
1455 if src:
1467 if src:
1456 src.close()
1468 src.close()
1457 if proc and proc.returncode != 0:
1469 if proc and proc.returncode != 0:
1458 raise error.Abort(_("extdata command '%s' failed: %s")
1470 raise error.Abort(_("extdata command '%s' failed: %s")
1459 % (cmd, procutil.explainexit(proc.returncode)))
1471 % (cmd, procutil.explainexit(proc.returncode)))
1460
1472
1461 return data
1473 return data
1462
1474
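A configuration sketch matching the docstring above (the section key and command are illustrative); each output line is a revision specifier, optionally followed by a space and a free-form value:

# in hgrc:
#   [extdata]
#   bugstatus = shell:cat .hg/bug-status-cache

from mercurial import scmutil

data = scmutil.extdatasource(repo, 'bugstatus')   # {rev: value} for locally known revs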
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

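# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# wlocksub() must only be called with the wlock held; the command string below
# is an arbitrary example and the helper name is hypothetical.
def _example_wlocksub(repo):
    with repo.wlock():
        # the child process inherits the lock via HG_WLOCK_LOCKER, so it can
        # run against the same working copy without deadlocking on the wlock
        return wlocksub(repo, 'hg status')
# ------------------------------------------------------------------------------
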
class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

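# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# progress instances are typically obtained through ui.makeprogress(); used as
# a context manager, the bar is cleared on exit. Topic, unit, and the helper
# name are examples only.
def _example_progress(ui, items):
    with ui.makeprogress('examining', unit='items', total=len(items)) as prog:
        for item in items:
            prog.increment(item=item)
# ------------------------------------------------------------------------------
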
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumeric and start with a letter; values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned in full under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines, which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write a key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as is, not in key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

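# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# A round trip through simplekeyvaluefile; the file name 'exampledata' and the
# helper are hypothetical. Keys must be alphanumeric and start with a letter.
def _example_simplekeyvaluefile(repo):
    f = simplekeyvaluefile(repo.vfs, 'exampledata')
    f.write({'version': '1', 'state': 'done'}, firstline='demo-v1')
    # reading it back yields:
    # {'__firstline': 'demo-v1', 'version': '1', 'state': 'done'}
    return f.read(firstlinenonkeyval=True)
# ------------------------------------------------------------------------------
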
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

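# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# An extension can register a prefetch hook so that prefetchfiles() calls it;
# the extension name and function below are hypothetical.
def _exampleprefetch(repo, revs, match):
    # fetch the matched files for the given revisions from a remote store
    pass

fileprefetchhooks.add('exampleext', _exampleprefetch)
# ------------------------------------------------------------------------------
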
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                           filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

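# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# Summary callbacks are selected by the transaction name; a pull/unbundle-style
# transaction gets the "new changesets" and phase-change reports above. The
# repository layer normally performs this registration itself; the helper
# below is a simplified, hypothetical illustration.
def _example_registersummary(repo):
    with repo.lock(), repo.transaction('unbundle') as tr:
        registersummarycallback(repo, tr, txnname='unbundle')
        # ... changes applied within the transaction are summarized on close
# ------------------------------------------------------------------------------
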
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

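# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# With experimental.directaccess enabled, a command can swap in the unhidden
# repo before resolving user-supplied revisions; the helper is hypothetical.
def _example_unhide(repo, specs):
    repo = unhidehashlikerevs(repo, specs, hiddentype='warn')
    return revrange(repo, specs)
# ------------------------------------------------------------------------------
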
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
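
# --- Editor's illustrative sketch (not part of scmutil.py) --------------------
# bookmarkrevs() backs operations on "the changesets of a bookmark"; the
# bookmark name and helper below are hypothetical.
def _example_bookmarkrevs(repo):
    for rev in bookmarkrevs(repo, 'feature-x'):
        repo.ui.write('%d\n' % rev)
# ------------------------------------------------------------------------------
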
@@ -1,77 +1,143 @@
=========================================================
Test features and behaviors related to the archived phase
=========================================================

$ cat << EOF >> $HGRCPATH
> [format]
> internal-phase=yes
> [extensions]
> strip=
> [experimental]
> EOF

$ hg init repo
$ cd repo
$ echo root > a
$ hg add a
$ hg ci -m 'root'

Test that bundle can unarchive a changeset
------------------------------------------

$ echo foo >> a
$ hg st
M a
$ hg ci -m 'unbundletesting'
$ hg log -G
@ changeset: 1:883aadbbf309
| tag: tip
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
o changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root

$ hg strip --soft --rev '.'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/883aadbbf309-efc55adc-backup.hg
$ hg log -G
@ changeset: 0:c1863a3840c6
tag: tip
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root

$ hg log -G --hidden
o changeset: 1:883aadbbf309
| tag: tip
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
@ changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root

$ hg unbundle .hg/strip-backup/883aadbbf309-efc55adc-backup.hg
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 1 files
(run 'hg update' to get a working copy)
$ hg log -G
o changeset: 1:883aadbbf309
| tag: tip
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
@ changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root


Test that history rewriting command can use the archived phase when allowed to
------------------------------------------------------------------------------

$ hg up 'desc(unbundletesting)'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo bar >> a
$ hg commit --amend --config experimental.cleanup-as-archived=yes
$ hg log -G
@ changeset: 2:d1e73e428f29
| tag: tip
| parent: 0:c1863a3840c6
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
o changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root

$ hg log -G --hidden
@ changeset: 2:d1e73e428f29
| tag: tip
| parent: 0:c1863a3840c6
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
| o changeset: 1:883aadbbf309
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
o changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root

$ ls -1 .hg/strip-backup/
883aadbbf309-efc55adc-amend.hg
883aadbbf309-efc55adc-backup.hg
$ hg unbundle .hg/strip-backup/883aadbbf309*amend.hg
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 1 files
(run 'hg update' to get a working copy)
$ hg log -G
@ changeset: 2:d1e73e428f29
| tag: tip
| parent: 0:c1863a3840c6
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
| o changeset: 1:883aadbbf309
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: unbundletesting
|
o changeset: 0:c1863a3840c6
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: root
