configitems: register the 'debug.dirstate.delaywrite' config
Boris Feld
r34482:cbda631c default
mercurial/configitems.py
@@ -1,692 +1,695 @@
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11
11
12 from . import (
12 from . import (
13 encoding,
13 encoding,
14 error,
14 error,
15 )
15 )
16
16
17 def loadconfigtable(ui, extname, configtable):
17 def loadconfigtable(ui, extname, configtable):
18 """update config item known to the ui with the extension ones"""
18 """update config item known to the ui with the extension ones"""
19 for section, items in configtable.items():
19 for section, items in configtable.items():
20 knownitems = ui._knownconfig.setdefault(section, {})
20 knownitems = ui._knownconfig.setdefault(section, {})
21 knownkeys = set(knownitems)
21 knownkeys = set(knownitems)
22 newkeys = set(items)
22 newkeys = set(items)
23 for key in sorted(knownkeys & newkeys):
23 for key in sorted(knownkeys & newkeys):
24 msg = "extension '%s' overwrite config item '%s.%s'"
24 msg = "extension '%s' overwrite config item '%s.%s'"
25 msg %= (extname, section, key)
25 msg %= (extname, section, key)
26 ui.develwarn(msg, config='warn-config')
26 ui.develwarn(msg, config='warn-config')
27
27
28 knownitems.update(items)
28 knownitems.update(items)
29
29
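For orientation, loadconfigtable() is what ultimately feeds ui._knownconfig. Below is a minimal sketch of that merge behaviour, assuming mercurial is importable and using a throwaway stand-in for the ui object (fakeui is illustrative, not a real class):

from mercurial import configitems

class fakeui(object):
    # minimal stand-in exposing only what loadconfigtable() touches
    def __init__(self):
        self._knownconfig = dict((s, dict(items))
                                 for s, items in configitems.coreitems.items())
    def develwarn(self, msg, config=None):
        print('devel-warn: %s' % msg)

ui = fakeui()
ext = {'experimental': {'copytrace': configitems.configitem(
    'experimental', 'copytrace', default='off')}}
# 'copytrace' is already a core item, so this emits the overwrite warning
# before replacing the entry in ui._knownconfig
configitems.loadconfigtable(ui, 'someext', ext)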
30 class configitem(object):
30 class configitem(object):
31 """represent a known config item
31 """represent a known config item
32
32
33 :section: the official config section where to find this item,
33 :section: the official config section where to find this item,
34 :name: the official name within the section,
34 :name: the official name within the section,
35 :default: default value for this item,
35 :default: default value for this item,
36 :alias: optional list of tuples as alternatives.
36 :alias: optional list of tuples as alternatives.
37 """
37 """
38
38
39 def __init__(self, section, name, default=None, alias=()):
39 def __init__(self, section, name, default=None, alias=()):
40 self.section = section
40 self.section = section
41 self.name = name
41 self.name = name
42 self.default = default
42 self.default = default
43 self.alias = list(alias)
43 self.alias = list(alias)
44
44
45 coreitems = {}
45 coreitems = {}
46
46
47 def _register(configtable, *args, **kwargs):
47 def _register(configtable, *args, **kwargs):
48 item = configitem(*args, **kwargs)
48 item = configitem(*args, **kwargs)
49 section = configtable.setdefault(item.section, {})
49 section = configtable.setdefault(item.section, {})
50 if item.name in section:
50 if item.name in section:
51 msg = "duplicated config item registration for '%s.%s'"
51 msg = "duplicated config item registration for '%s.%s'"
52 raise error.ProgrammingError(msg % (item.section, item.name))
52 raise error.ProgrammingError(msg % (item.section, item.name))
53 section[item.name] = item
53 section[item.name] = item
54
54
55 # special value for case where the default is derived from other values
55 # special value for case where the default is derived from other values
56 dynamicdefault = object()
56 dynamicdefault = object()
57
57
58 # Registering actual config items
58 # Registering actual config items
59
59
60 def getitemregister(configtable):
60 def getitemregister(configtable):
61 return functools.partial(_register, configtable)
61 return functools.partial(_register, configtable)
62
62
63 coreconfigitem = getitemregister(coreitems)
63 coreconfigitem = getitemregister(coreitems)
64
64
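getitemregister() is also what mercurial.registrar exposes to extensions; a minimal sketch of the extension-side pattern (the 'myext' names are made up for illustration):

# myext.py - hypothetical extension declaring its own config items
from mercurial import registrar

configtable = {}
configitem = registrar.configitem(configtable)

configitem('myext', 'enabled',
    default=False,
)
# passing a callable such as list avoids sharing one mutable default
configitem('myext', 'colors',
    default=list,
)

loadconfigtable() (top of this file) then merges configtable into the ui when the extension is loaded.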
65 coreconfigitem('auth', 'cookiefile',
65 coreconfigitem('auth', 'cookiefile',
66 default=None,
66 default=None,
67 )
67 )
68 # bookmarks.pushing: internal hack for discovery
68 # bookmarks.pushing: internal hack for discovery
69 coreconfigitem('bookmarks', 'pushing',
69 coreconfigitem('bookmarks', 'pushing',
70 default=list,
70 default=list,
71 )
71 )
72 # bundle.mainreporoot: internal hack for bundlerepo
72 # bundle.mainreporoot: internal hack for bundlerepo
73 coreconfigitem('bundle', 'mainreporoot',
73 coreconfigitem('bundle', 'mainreporoot',
74 default='',
74 default='',
75 )
75 )
76 # bundle.reorder: experimental config
76 # bundle.reorder: experimental config
77 coreconfigitem('bundle', 'reorder',
77 coreconfigitem('bundle', 'reorder',
78 default='auto',
78 default='auto',
79 )
79 )
80 coreconfigitem('censor', 'policy',
80 coreconfigitem('censor', 'policy',
81 default='abort',
81 default='abort',
82 )
82 )
83 coreconfigitem('chgserver', 'idletimeout',
83 coreconfigitem('chgserver', 'idletimeout',
84 default=3600,
84 default=3600,
85 )
85 )
86 coreconfigitem('chgserver', 'skiphash',
86 coreconfigitem('chgserver', 'skiphash',
87 default=False,
87 default=False,
88 )
88 )
89 coreconfigitem('cmdserver', 'log',
89 coreconfigitem('cmdserver', 'log',
90 default=None,
90 default=None,
91 )
91 )
92 coreconfigitem('color', 'mode',
92 coreconfigitem('color', 'mode',
93 default='auto',
93 default='auto',
94 )
94 )
95 coreconfigitem('color', 'pagermode',
95 coreconfigitem('color', 'pagermode',
96 default=dynamicdefault,
96 default=dynamicdefault,
97 )
97 )
98 coreconfigitem('commands', 'status.relative',
98 coreconfigitem('commands', 'status.relative',
99 default=False,
99 default=False,
100 )
100 )
101 coreconfigitem('commands', 'status.skipstates',
101 coreconfigitem('commands', 'status.skipstates',
102 default=[],
102 default=[],
103 )
103 )
104 coreconfigitem('commands', 'status.verbose',
104 coreconfigitem('commands', 'status.verbose',
105 default=False,
105 default=False,
106 )
106 )
107 coreconfigitem('commands', 'update.requiredest',
107 coreconfigitem('commands', 'update.requiredest',
108 default=False,
108 default=False,
109 )
109 )
110 coreconfigitem('debug', 'dirstate.delaywrite',
111 default=0,
112 )
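The new item gives 'debug.dirstate.delaywrite' an explicit default of 0. A hedged sketch of the reading side, since the consumer (dirstate's write path, not shown in this hunk) goes through ui.configint and only acts on a positive value; _maybedelay is an illustrative helper, not Mercurial code:

import time

def _maybedelay(ui):
    # configint() falls back to the registered default (0) when unset
    delaywrite = ui.configint('debug', 'dirstate.delaywrite')
    if delaywrite > 0:
        # give file mtimes a chance to move on before the dirstate
        # itself is written (mainly useful to the test suite)
        time.sleep(delaywrite)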
110 coreconfigitem('devel', 'all-warnings',
113 coreconfigitem('devel', 'all-warnings',
111 default=False,
114 default=False,
112 )
115 )
113 coreconfigitem('devel', 'bundle2.debug',
116 coreconfigitem('devel', 'bundle2.debug',
114 default=False,
117 default=False,
115 )
118 )
116 coreconfigitem('devel', 'check-locks',
119 coreconfigitem('devel', 'check-locks',
117 default=False,
120 default=False,
118 )
121 )
119 coreconfigitem('devel', 'check-relroot',
122 coreconfigitem('devel', 'check-relroot',
120 default=False,
123 default=False,
121 )
124 )
122 coreconfigitem('devel', 'default-date',
125 coreconfigitem('devel', 'default-date',
123 default=None,
126 default=None,
124 )
127 )
125 coreconfigitem('devel', 'deprec-warn',
128 coreconfigitem('devel', 'deprec-warn',
126 default=False,
129 default=False,
127 )
130 )
128 coreconfigitem('devel', 'disableloaddefaultcerts',
131 coreconfigitem('devel', 'disableloaddefaultcerts',
129 default=False,
132 default=False,
130 )
133 )
131 coreconfigitem('devel', 'legacy.exchange',
134 coreconfigitem('devel', 'legacy.exchange',
132 default=list,
135 default=list,
133 )
136 )
134 coreconfigitem('devel', 'servercafile',
137 coreconfigitem('devel', 'servercafile',
135 default='',
138 default='',
136 )
139 )
137 coreconfigitem('devel', 'serverexactprotocol',
140 coreconfigitem('devel', 'serverexactprotocol',
138 default='',
141 default='',
139 )
142 )
140 coreconfigitem('devel', 'serverrequirecert',
143 coreconfigitem('devel', 'serverrequirecert',
141 default=False,
144 default=False,
142 )
145 )
143 coreconfigitem('devel', 'strip-obsmarkers',
146 coreconfigitem('devel', 'strip-obsmarkers',
144 default=True,
147 default=True,
145 )
148 )
146 coreconfigitem('email', 'charsets',
149 coreconfigitem('email', 'charsets',
147 default=list,
150 default=list,
148 )
151 )
149 coreconfigitem('email', 'from',
152 coreconfigitem('email', 'from',
150 default=None,
153 default=None,
151 )
154 )
152 coreconfigitem('email', 'method',
155 coreconfigitem('email', 'method',
153 default='smtp',
156 default='smtp',
154 )
157 )
155 coreconfigitem('experimental', 'bundle-phases',
158 coreconfigitem('experimental', 'bundle-phases',
156 default=False,
159 default=False,
157 )
160 )
158 coreconfigitem('experimental', 'bundle2-advertise',
161 coreconfigitem('experimental', 'bundle2-advertise',
159 default=True,
162 default=True,
160 )
163 )
161 coreconfigitem('experimental', 'bundle2-output-capture',
164 coreconfigitem('experimental', 'bundle2-output-capture',
162 default=False,
165 default=False,
163 )
166 )
164 coreconfigitem('experimental', 'bundle2.pushback',
167 coreconfigitem('experimental', 'bundle2.pushback',
165 default=False,
168 default=False,
166 )
169 )
167 coreconfigitem('experimental', 'bundle2lazylocking',
170 coreconfigitem('experimental', 'bundle2lazylocking',
168 default=False,
171 default=False,
169 )
172 )
170 coreconfigitem('experimental', 'bundlecomplevel',
173 coreconfigitem('experimental', 'bundlecomplevel',
171 default=None,
174 default=None,
172 )
175 )
173 coreconfigitem('experimental', 'changegroup3',
176 coreconfigitem('experimental', 'changegroup3',
174 default=False,
177 default=False,
175 )
178 )
176 coreconfigitem('experimental', 'clientcompressionengines',
179 coreconfigitem('experimental', 'clientcompressionengines',
177 default=list,
180 default=list,
178 )
181 )
179 coreconfigitem('experimental', 'copytrace',
182 coreconfigitem('experimental', 'copytrace',
180 default='on',
183 default='on',
181 )
184 )
182 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
185 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
183 default=100,
186 default=100,
184 )
187 )
185 coreconfigitem('experimental', 'crecordtest',
188 coreconfigitem('experimental', 'crecordtest',
186 default=None,
189 default=None,
187 )
190 )
188 coreconfigitem('experimental', 'editortmpinhg',
191 coreconfigitem('experimental', 'editortmpinhg',
189 default=False,
192 default=False,
190 )
193 )
191 coreconfigitem('experimental', 'stabilization',
194 coreconfigitem('experimental', 'stabilization',
192 default=list,
195 default=list,
193 alias=[('experimental', 'evolution')],
196 alias=[('experimental', 'evolution')],
194 )
197 )
195 coreconfigitem('experimental', 'stabilization.bundle-obsmarker',
198 coreconfigitem('experimental', 'stabilization.bundle-obsmarker',
196 default=False,
199 default=False,
197 alias=[('experimental', 'evolution.bundle-obsmarker')],
200 alias=[('experimental', 'evolution.bundle-obsmarker')],
198 )
201 )
199 coreconfigitem('experimental', 'stabilization.track-operation',
202 coreconfigitem('experimental', 'stabilization.track-operation',
200 default=True,
203 default=True,
201 alias=[('experimental', 'evolution.track-operation')]
204 alias=[('experimental', 'evolution.track-operation')]
202 )
205 )
203 coreconfigitem('experimental', 'exportableenviron',
206 coreconfigitem('experimental', 'exportableenviron',
204 default=list,
207 default=list,
205 )
208 )
206 coreconfigitem('experimental', 'extendedheader.index',
209 coreconfigitem('experimental', 'extendedheader.index',
207 default=None,
210 default=None,
208 )
211 )
209 coreconfigitem('experimental', 'extendedheader.similarity',
212 coreconfigitem('experimental', 'extendedheader.similarity',
210 default=False,
213 default=False,
211 )
214 )
212 coreconfigitem('experimental', 'format.compression',
215 coreconfigitem('experimental', 'format.compression',
213 default='zlib',
216 default='zlib',
214 )
217 )
215 coreconfigitem('experimental', 'graphshorten',
218 coreconfigitem('experimental', 'graphshorten',
216 default=False,
219 default=False,
217 )
220 )
218 coreconfigitem('experimental', 'hook-track-tags',
221 coreconfigitem('experimental', 'hook-track-tags',
219 default=False,
222 default=False,
220 )
223 )
221 coreconfigitem('experimental', 'httppostargs',
224 coreconfigitem('experimental', 'httppostargs',
222 default=False,
225 default=False,
223 )
226 )
224 coreconfigitem('experimental', 'manifestv2',
227 coreconfigitem('experimental', 'manifestv2',
225 default=False,
228 default=False,
226 )
229 )
227 coreconfigitem('experimental', 'mergedriver',
230 coreconfigitem('experimental', 'mergedriver',
228 default=None,
231 default=None,
229 )
232 )
230 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
233 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
231 default=False,
234 default=False,
232 )
235 )
233 coreconfigitem('experimental', 'rebase.multidest',
236 coreconfigitem('experimental', 'rebase.multidest',
234 default=False,
237 default=False,
235 )
238 )
236 coreconfigitem('experimental', 'revertalternateinteractivemode',
239 coreconfigitem('experimental', 'revertalternateinteractivemode',
237 default=True,
240 default=True,
238 )
241 )
239 coreconfigitem('experimental', 'revlogv2',
242 coreconfigitem('experimental', 'revlogv2',
240 default=None,
243 default=None,
241 )
244 )
242 coreconfigitem('experimental', 'spacemovesdown',
245 coreconfigitem('experimental', 'spacemovesdown',
243 default=False,
246 default=False,
244 )
247 )
245 coreconfigitem('experimental', 'treemanifest',
248 coreconfigitem('experimental', 'treemanifest',
246 default=False,
249 default=False,
247 )
250 )
248 coreconfigitem('experimental', 'updatecheck',
251 coreconfigitem('experimental', 'updatecheck',
249 default=None,
252 default=None,
250 )
253 )
251 coreconfigitem('format', 'aggressivemergedeltas',
254 coreconfigitem('format', 'aggressivemergedeltas',
252 default=False,
255 default=False,
253 )
256 )
254 coreconfigitem('format', 'chunkcachesize',
257 coreconfigitem('format', 'chunkcachesize',
255 default=None,
258 default=None,
256 )
259 )
257 coreconfigitem('format', 'dotencode',
260 coreconfigitem('format', 'dotencode',
258 default=True,
261 default=True,
259 )
262 )
260 coreconfigitem('format', 'generaldelta',
263 coreconfigitem('format', 'generaldelta',
261 default=False,
264 default=False,
262 )
265 )
263 coreconfigitem('format', 'manifestcachesize',
266 coreconfigitem('format', 'manifestcachesize',
264 default=None,
267 default=None,
265 )
268 )
266 coreconfigitem('format', 'maxchainlen',
269 coreconfigitem('format', 'maxchainlen',
267 default=None,
270 default=None,
268 )
271 )
269 coreconfigitem('format', 'obsstore-version',
272 coreconfigitem('format', 'obsstore-version',
270 default=None,
273 default=None,
271 )
274 )
272 coreconfigitem('format', 'usefncache',
275 coreconfigitem('format', 'usefncache',
273 default=True,
276 default=True,
274 )
277 )
275 coreconfigitem('format', 'usegeneraldelta',
278 coreconfigitem('format', 'usegeneraldelta',
276 default=True,
279 default=True,
277 )
280 )
278 coreconfigitem('format', 'usestore',
281 coreconfigitem('format', 'usestore',
279 default=True,
282 default=True,
280 )
283 )
281 coreconfigitem('hostsecurity', 'ciphers',
284 coreconfigitem('hostsecurity', 'ciphers',
282 default=None,
285 default=None,
283 )
286 )
284 coreconfigitem('hostsecurity', 'disabletls10warning',
287 coreconfigitem('hostsecurity', 'disabletls10warning',
285 default=False,
288 default=False,
286 )
289 )
287 coreconfigitem('http_proxy', 'always',
290 coreconfigitem('http_proxy', 'always',
288 default=False,
291 default=False,
289 )
292 )
290 coreconfigitem('http_proxy', 'host',
293 coreconfigitem('http_proxy', 'host',
291 default=None,
294 default=None,
292 )
295 )
293 coreconfigitem('http_proxy', 'no',
296 coreconfigitem('http_proxy', 'no',
294 default=list,
297 default=list,
295 )
298 )
296 coreconfigitem('http_proxy', 'passwd',
299 coreconfigitem('http_proxy', 'passwd',
297 default=None,
300 default=None,
298 )
301 )
299 coreconfigitem('http_proxy', 'user',
302 coreconfigitem('http_proxy', 'user',
300 default=None,
303 default=None,
301 )
304 )
302 coreconfigitem('merge', 'followcopies',
305 coreconfigitem('merge', 'followcopies',
303 default=True,
306 default=True,
304 )
307 )
305 coreconfigitem('merge', 'preferancestor',
308 coreconfigitem('merge', 'preferancestor',
306 default=lambda: ['*'],
309 default=lambda: ['*'],
307 )
310 )
308 coreconfigitem('pager', 'ignore',
311 coreconfigitem('pager', 'ignore',
309 default=list,
312 default=list,
310 )
313 )
311 coreconfigitem('patch', 'eol',
314 coreconfigitem('patch', 'eol',
312 default='strict',
315 default='strict',
313 )
316 )
314 coreconfigitem('patch', 'fuzz',
317 coreconfigitem('patch', 'fuzz',
315 default=2,
318 default=2,
316 )
319 )
317 coreconfigitem('paths', 'default',
320 coreconfigitem('paths', 'default',
318 default=None,
321 default=None,
319 )
322 )
320 coreconfigitem('paths', 'default-push',
323 coreconfigitem('paths', 'default-push',
321 default=None,
324 default=None,
322 )
325 )
323 coreconfigitem('phases', 'checksubrepos',
326 coreconfigitem('phases', 'checksubrepos',
324 default='follow',
327 default='follow',
325 )
328 )
326 coreconfigitem('phases', 'new-commit',
329 coreconfigitem('phases', 'new-commit',
327 default=dynamicdefault,
330 default=dynamicdefault,
328 )
331 )
329 coreconfigitem('phases', 'publish',
332 coreconfigitem('phases', 'publish',
330 default=True,
333 default=True,
331 )
334 )
332 coreconfigitem('profiling', 'enabled',
335 coreconfigitem('profiling', 'enabled',
333 default=False,
336 default=False,
334 )
337 )
335 coreconfigitem('profiling', 'format',
338 coreconfigitem('profiling', 'format',
336 default='text',
339 default='text',
337 )
340 )
338 coreconfigitem('profiling', 'freq',
341 coreconfigitem('profiling', 'freq',
339 default=1000,
342 default=1000,
340 )
343 )
341 coreconfigitem('profiling', 'limit',
344 coreconfigitem('profiling', 'limit',
342 default=30,
345 default=30,
343 )
346 )
344 coreconfigitem('profiling', 'nested',
347 coreconfigitem('profiling', 'nested',
345 default=0,
348 default=0,
346 )
349 )
347 coreconfigitem('profiling', 'output',
350 coreconfigitem('profiling', 'output',
348 default=None,
351 default=None,
349 )
352 )
350 coreconfigitem('profiling', 'showmax',
353 coreconfigitem('profiling', 'showmax',
351 default=0.999,
354 default=0.999,
352 )
355 )
353 coreconfigitem('profiling', 'showmin',
356 coreconfigitem('profiling', 'showmin',
354 default=dynamicdefault,
357 default=dynamicdefault,
355 )
358 )
356 coreconfigitem('profiling', 'sort',
359 coreconfigitem('profiling', 'sort',
357 default='inlinetime',
360 default='inlinetime',
358 )
361 )
359 coreconfigitem('profiling', 'statformat',
362 coreconfigitem('profiling', 'statformat',
360 default='hotpath',
363 default='hotpath',
361 )
364 )
362 coreconfigitem('profiling', 'type',
365 coreconfigitem('profiling', 'type',
363 default='stat',
366 default='stat',
364 )
367 )
365 coreconfigitem('progress', 'assume-tty',
368 coreconfigitem('progress', 'assume-tty',
366 default=False,
369 default=False,
367 )
370 )
368 coreconfigitem('progress', 'changedelay',
371 coreconfigitem('progress', 'changedelay',
369 default=1,
372 default=1,
370 )
373 )
371 coreconfigitem('progress', 'clear-complete',
374 coreconfigitem('progress', 'clear-complete',
372 default=True,
375 default=True,
373 )
376 )
374 coreconfigitem('progress', 'debug',
377 coreconfigitem('progress', 'debug',
375 default=False,
378 default=False,
376 )
379 )
377 coreconfigitem('progress', 'delay',
380 coreconfigitem('progress', 'delay',
378 default=3,
381 default=3,
379 )
382 )
380 coreconfigitem('progress', 'disable',
383 coreconfigitem('progress', 'disable',
381 default=False,
384 default=False,
382 )
385 )
383 coreconfigitem('progress', 'estimateinterval',
386 coreconfigitem('progress', 'estimateinterval',
384 default=60.0,
387 default=60.0,
385 )
388 )
386 coreconfigitem('progress', 'refresh',
389 coreconfigitem('progress', 'refresh',
387 default=0.1,
390 default=0.1,
388 )
391 )
389 coreconfigitem('progress', 'width',
392 coreconfigitem('progress', 'width',
390 default=dynamicdefault,
393 default=dynamicdefault,
391 )
394 )
392 coreconfigitem('push', 'pushvars.server',
395 coreconfigitem('push', 'pushvars.server',
393 default=False,
396 default=False,
394 )
397 )
395 coreconfigitem('server', 'bundle1',
398 coreconfigitem('server', 'bundle1',
396 default=True,
399 default=True,
397 )
400 )
398 coreconfigitem('server', 'bundle1gd',
401 coreconfigitem('server', 'bundle1gd',
399 default=None,
402 default=None,
400 )
403 )
401 coreconfigitem('server', 'compressionengines',
404 coreconfigitem('server', 'compressionengines',
402 default=list,
405 default=list,
403 )
406 )
404 coreconfigitem('server', 'concurrent-push-mode',
407 coreconfigitem('server', 'concurrent-push-mode',
405 default='strict',
408 default='strict',
406 )
409 )
407 coreconfigitem('server', 'disablefullbundle',
410 coreconfigitem('server', 'disablefullbundle',
408 default=False,
411 default=False,
409 )
412 )
410 coreconfigitem('server', 'maxhttpheaderlen',
413 coreconfigitem('server', 'maxhttpheaderlen',
411 default=1024,
414 default=1024,
412 )
415 )
413 coreconfigitem('server', 'preferuncompressed',
416 coreconfigitem('server', 'preferuncompressed',
414 default=False,
417 default=False,
415 )
418 )
416 coreconfigitem('server', 'uncompressed',
419 coreconfigitem('server', 'uncompressed',
417 default=True,
420 default=True,
418 )
421 )
419 coreconfigitem('server', 'uncompressedallowsecret',
422 coreconfigitem('server', 'uncompressedallowsecret',
420 default=False,
423 default=False,
421 )
424 )
422 coreconfigitem('server', 'validate',
425 coreconfigitem('server', 'validate',
423 default=False,
426 default=False,
424 )
427 )
425 coreconfigitem('server', 'zliblevel',
428 coreconfigitem('server', 'zliblevel',
426 default=-1,
429 default=-1,
427 )
430 )
428 coreconfigitem('smtp', 'host',
431 coreconfigitem('smtp', 'host',
429 default=None,
432 default=None,
430 )
433 )
431 coreconfigitem('smtp', 'local_hostname',
434 coreconfigitem('smtp', 'local_hostname',
432 default=None,
435 default=None,
433 )
436 )
434 coreconfigitem('smtp', 'password',
437 coreconfigitem('smtp', 'password',
435 default=None,
438 default=None,
436 )
439 )
437 coreconfigitem('smtp', 'port',
440 coreconfigitem('smtp', 'port',
438 default=dynamicdefault,
441 default=dynamicdefault,
439 )
442 )
440 coreconfigitem('smtp', 'tls',
443 coreconfigitem('smtp', 'tls',
441 default='none',
444 default='none',
442 )
445 )
443 coreconfigitem('smtp', 'username',
446 coreconfigitem('smtp', 'username',
444 default=None,
447 default=None,
445 )
448 )
446 coreconfigitem('sparse', 'missingwarning',
449 coreconfigitem('sparse', 'missingwarning',
447 default=True,
450 default=True,
448 )
451 )
449 coreconfigitem('trusted', 'groups',
452 coreconfigitem('trusted', 'groups',
450 default=list,
453 default=list,
451 )
454 )
452 coreconfigitem('trusted', 'users',
455 coreconfigitem('trusted', 'users',
453 default=list,
456 default=list,
454 )
457 )
455 coreconfigitem('ui', '_usedassubrepo',
458 coreconfigitem('ui', '_usedassubrepo',
456 default=False,
459 default=False,
457 )
460 )
458 coreconfigitem('ui', 'allowemptycommit',
461 coreconfigitem('ui', 'allowemptycommit',
459 default=False,
462 default=False,
460 )
463 )
461 coreconfigitem('ui', 'archivemeta',
464 coreconfigitem('ui', 'archivemeta',
462 default=True,
465 default=True,
463 )
466 )
464 coreconfigitem('ui', 'askusername',
467 coreconfigitem('ui', 'askusername',
465 default=False,
468 default=False,
466 )
469 )
467 coreconfigitem('ui', 'clonebundlefallback',
470 coreconfigitem('ui', 'clonebundlefallback',
468 default=False,
471 default=False,
469 )
472 )
470 coreconfigitem('ui', 'clonebundleprefers',
473 coreconfigitem('ui', 'clonebundleprefers',
471 default=list,
474 default=list,
472 )
475 )
473 coreconfigitem('ui', 'clonebundles',
476 coreconfigitem('ui', 'clonebundles',
474 default=True,
477 default=True,
475 )
478 )
476 coreconfigitem('ui', 'color',
479 coreconfigitem('ui', 'color',
477 default='auto',
480 default='auto',
478 )
481 )
479 coreconfigitem('ui', 'commitsubrepos',
482 coreconfigitem('ui', 'commitsubrepos',
480 default=False,
483 default=False,
481 )
484 )
482 coreconfigitem('ui', 'debug',
485 coreconfigitem('ui', 'debug',
483 default=False,
486 default=False,
484 )
487 )
485 coreconfigitem('ui', 'debugger',
488 coreconfigitem('ui', 'debugger',
486 default=None,
489 default=None,
487 )
490 )
488 coreconfigitem('ui', 'fallbackencoding',
491 coreconfigitem('ui', 'fallbackencoding',
489 default=None,
492 default=None,
490 )
493 )
491 coreconfigitem('ui', 'forcecwd',
494 coreconfigitem('ui', 'forcecwd',
492 default=None,
495 default=None,
493 )
496 )
494 coreconfigitem('ui', 'forcemerge',
497 coreconfigitem('ui', 'forcemerge',
495 default=None,
498 default=None,
496 )
499 )
497 coreconfigitem('ui', 'formatdebug',
500 coreconfigitem('ui', 'formatdebug',
498 default=False,
501 default=False,
499 )
502 )
500 coreconfigitem('ui', 'formatjson',
503 coreconfigitem('ui', 'formatjson',
501 default=False,
504 default=False,
502 )
505 )
503 coreconfigitem('ui', 'formatted',
506 coreconfigitem('ui', 'formatted',
504 default=None,
507 default=None,
505 )
508 )
506 coreconfigitem('ui', 'graphnodetemplate',
509 coreconfigitem('ui', 'graphnodetemplate',
507 default=None,
510 default=None,
508 )
511 )
509 coreconfigitem('ui', 'http2debuglevel',
512 coreconfigitem('ui', 'http2debuglevel',
510 default=None,
513 default=None,
511 )
514 )
512 coreconfigitem('ui', 'interactive',
515 coreconfigitem('ui', 'interactive',
513 default=None,
516 default=None,
514 )
517 )
515 coreconfigitem('ui', 'interface',
518 coreconfigitem('ui', 'interface',
516 default=None,
519 default=None,
517 )
520 )
518 coreconfigitem('ui', 'logblockedtimes',
521 coreconfigitem('ui', 'logblockedtimes',
519 default=False,
522 default=False,
520 )
523 )
521 coreconfigitem('ui', 'logtemplate',
524 coreconfigitem('ui', 'logtemplate',
522 default=None,
525 default=None,
523 )
526 )
524 coreconfigitem('ui', 'merge',
527 coreconfigitem('ui', 'merge',
525 default=None,
528 default=None,
526 )
529 )
527 coreconfigitem('ui', 'mergemarkers',
530 coreconfigitem('ui', 'mergemarkers',
528 default='basic',
531 default='basic',
529 )
532 )
530 coreconfigitem('ui', 'mergemarkertemplate',
533 coreconfigitem('ui', 'mergemarkertemplate',
531 default=('{node|short} '
534 default=('{node|short} '
532 '{ifeq(tags, "tip", "", '
535 '{ifeq(tags, "tip", "", '
533 'ifeq(tags, "", "", "{tags} "))}'
536 'ifeq(tags, "", "", "{tags} "))}'
534 '{if(bookmarks, "{bookmarks} ")}'
537 '{if(bookmarks, "{bookmarks} ")}'
535 '{ifeq(branch, "default", "", "{branch} ")}'
538 '{ifeq(branch, "default", "", "{branch} ")}'
536 '- {author|user}: {desc|firstline}')
539 '- {author|user}: {desc|firstline}')
537 )
540 )
538 coreconfigitem('ui', 'nontty',
541 coreconfigitem('ui', 'nontty',
539 default=False,
542 default=False,
540 )
543 )
541 coreconfigitem('ui', 'origbackuppath',
544 coreconfigitem('ui', 'origbackuppath',
542 default=None,
545 default=None,
543 )
546 )
544 coreconfigitem('ui', 'paginate',
547 coreconfigitem('ui', 'paginate',
545 default=True,
548 default=True,
546 )
549 )
547 coreconfigitem('ui', 'patch',
550 coreconfigitem('ui', 'patch',
548 default=None,
551 default=None,
549 )
552 )
550 coreconfigitem('ui', 'portablefilenames',
553 coreconfigitem('ui', 'portablefilenames',
551 default='warn',
554 default='warn',
552 )
555 )
553 coreconfigitem('ui', 'promptecho',
556 coreconfigitem('ui', 'promptecho',
554 default=False,
557 default=False,
555 )
558 )
556 coreconfigitem('ui', 'quiet',
559 coreconfigitem('ui', 'quiet',
557 default=False,
560 default=False,
558 )
561 )
559 coreconfigitem('ui', 'quietbookmarkmove',
562 coreconfigitem('ui', 'quietbookmarkmove',
560 default=False,
563 default=False,
561 )
564 )
562 coreconfigitem('ui', 'remotecmd',
565 coreconfigitem('ui', 'remotecmd',
563 default='hg',
566 default='hg',
564 )
567 )
565 coreconfigitem('ui', 'report_untrusted',
568 coreconfigitem('ui', 'report_untrusted',
566 default=True,
569 default=True,
567 )
570 )
568 coreconfigitem('ui', 'rollback',
571 coreconfigitem('ui', 'rollback',
569 default=True,
572 default=True,
570 )
573 )
571 coreconfigitem('ui', 'slash',
574 coreconfigitem('ui', 'slash',
572 default=False,
575 default=False,
573 )
576 )
574 coreconfigitem('ui', 'ssh',
577 coreconfigitem('ui', 'ssh',
575 default='ssh',
578 default='ssh',
576 )
579 )
577 coreconfigitem('ui', 'statuscopies',
580 coreconfigitem('ui', 'statuscopies',
578 default=False,
581 default=False,
579 )
582 )
580 coreconfigitem('ui', 'strict',
583 coreconfigitem('ui', 'strict',
581 default=False,
584 default=False,
582 )
585 )
583 coreconfigitem('ui', 'style',
586 coreconfigitem('ui', 'style',
584 default='',
587 default='',
585 )
588 )
586 coreconfigitem('ui', 'supportcontact',
589 coreconfigitem('ui', 'supportcontact',
587 default=None,
590 default=None,
588 )
591 )
589 coreconfigitem('ui', 'textwidth',
592 coreconfigitem('ui', 'textwidth',
590 default=78,
593 default=78,
591 )
594 )
592 coreconfigitem('ui', 'timeout',
595 coreconfigitem('ui', 'timeout',
593 default='600',
596 default='600',
594 )
597 )
595 coreconfigitem('ui', 'traceback',
598 coreconfigitem('ui', 'traceback',
596 default=False,
599 default=False,
597 )
600 )
598 coreconfigitem('ui', 'tweakdefaults',
601 coreconfigitem('ui', 'tweakdefaults',
599 default=False,
602 default=False,
600 )
603 )
601 coreconfigitem('ui', 'usehttp2',
604 coreconfigitem('ui', 'usehttp2',
602 default=False,
605 default=False,
603 )
606 )
604 coreconfigitem('ui', 'username',
607 coreconfigitem('ui', 'username',
605 alias=[('ui', 'user')]
608 alias=[('ui', 'user')]
606 )
609 )
607 coreconfigitem('ui', 'verbose',
610 coreconfigitem('ui', 'verbose',
608 default=False,
611 default=False,
609 )
612 )
610 coreconfigitem('verify', 'skipflags',
613 coreconfigitem('verify', 'skipflags',
611 default=None,
614 default=None,
612 )
615 )
613 coreconfigitem('web', 'accesslog',
616 coreconfigitem('web', 'accesslog',
614 default='-',
617 default='-',
615 )
618 )
616 coreconfigitem('web', 'address',
619 coreconfigitem('web', 'address',
617 default='',
620 default='',
618 )
621 )
619 coreconfigitem('web', 'allow_archive',
622 coreconfigitem('web', 'allow_archive',
620 default=list,
623 default=list,
621 )
624 )
622 coreconfigitem('web', 'allow_read',
625 coreconfigitem('web', 'allow_read',
623 default=list,
626 default=list,
624 )
627 )
625 coreconfigitem('web', 'baseurl',
628 coreconfigitem('web', 'baseurl',
626 default=None,
629 default=None,
627 )
630 )
628 coreconfigitem('web', 'cacerts',
631 coreconfigitem('web', 'cacerts',
629 default=None,
632 default=None,
630 )
633 )
631 coreconfigitem('web', 'certificate',
634 coreconfigitem('web', 'certificate',
632 default=None,
635 default=None,
633 )
636 )
634 coreconfigitem('web', 'collapse',
637 coreconfigitem('web', 'collapse',
635 default=False,
638 default=False,
636 )
639 )
637 coreconfigitem('web', 'csp',
640 coreconfigitem('web', 'csp',
638 default=None,
641 default=None,
639 )
642 )
640 coreconfigitem('web', 'deny_read',
643 coreconfigitem('web', 'deny_read',
641 default=list,
644 default=list,
642 )
645 )
643 coreconfigitem('web', 'descend',
646 coreconfigitem('web', 'descend',
644 default=True,
647 default=True,
645 )
648 )
646 coreconfigitem('web', 'description',
649 coreconfigitem('web', 'description',
647 default="",
650 default="",
648 )
651 )
649 coreconfigitem('web', 'encoding',
652 coreconfigitem('web', 'encoding',
650 default=lambda: encoding.encoding,
653 default=lambda: encoding.encoding,
651 )
654 )
652 coreconfigitem('web', 'errorlog',
655 coreconfigitem('web', 'errorlog',
653 default='-',
656 default='-',
654 )
657 )
655 coreconfigitem('web', 'ipv6',
658 coreconfigitem('web', 'ipv6',
656 default=False,
659 default=False,
657 )
660 )
658 coreconfigitem('web', 'port',
661 coreconfigitem('web', 'port',
659 default=8000,
662 default=8000,
660 )
663 )
661 coreconfigitem('web', 'prefix',
664 coreconfigitem('web', 'prefix',
662 default='',
665 default='',
663 )
666 )
664 coreconfigitem('web', 'refreshinterval',
667 coreconfigitem('web', 'refreshinterval',
665 default=20,
668 default=20,
666 )
669 )
667 coreconfigitem('web', 'stripes',
670 coreconfigitem('web', 'stripes',
668 default=1,
671 default=1,
669 )
672 )
670 coreconfigitem('web', 'style',
673 coreconfigitem('web', 'style',
671 default='paper',
674 default='paper',
672 )
675 )
673 coreconfigitem('web', 'templates',
676 coreconfigitem('web', 'templates',
674 default=None,
677 default=None,
675 )
678 )
676 coreconfigitem('worker', 'backgroundclose',
679 coreconfigitem('worker', 'backgroundclose',
677 default=dynamicdefault,
680 default=dynamicdefault,
678 )
681 )
679 # Windows defaults to a limit of 512 open files. A buffer of 128
682 # Windows defaults to a limit of 512 open files. A buffer of 128
680 # should give us enough headway.
683 # should give us enough headway.
681 coreconfigitem('worker', 'backgroundclosemaxqueue',
684 coreconfigitem('worker', 'backgroundclosemaxqueue',
682 default=384,
685 default=384,
683 )
686 )
684 coreconfigitem('worker', 'backgroundcloseminfilecount',
687 coreconfigitem('worker', 'backgroundcloseminfilecount',
685 default=2048,
688 default=2048,
686 )
689 )
687 coreconfigitem('worker', 'backgroundclosethreadcount',
690 coreconfigitem('worker', 'backgroundclosethreadcount',
688 default=4,
691 default=4,
689 )
692 )
690 coreconfigitem('worker', 'numcpus',
693 coreconfigitem('worker', 'numcpus',
691 default=None,
694 default=None,
692 )
695 )
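For reference, the registered defaults above are what the ui lookup helpers fall back to when nothing is set in an hgrc; a few illustrative lookups (return values assume no user configuration and a ui object at hand):

ui.config('ui', 'remotecmd')            # 'hg'
ui.configint('web', 'port')             # 8000
ui.configbool('format', 'usestore')     # True
ui.configlist('web', 'allow_archive')   # [] (default=list is called)
ui.config('ui', 'username')             # also honours the ('ui', 'user') alias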
mercurial/dirstate.py
@@ -1,1401 +1,1401 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
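Callers are expected to wrap parent changes in this context manager (setparents() below refuses to run otherwise); a minimal usage sketch, where newp1 and newp2 stand for binary node ids:

with repo.dirstate.parentchange():
    repo.dirstate.setparents(newp1, newp2)
    # entry fix-ups belong inside the same block so that a failure here
    # keeps the half-updated dirstate from being written at wlock release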
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 '''Return the dirstate contents as a map from filename to
130 '''Return the dirstate contents as a map from filename to
131 (state, mode, size, time).'''
131 (state, mode, size, time).'''
132 self._read()
132 self._read()
133 return self._map
133 return self._map
134
134
135 @propertycache
135 @propertycache
136 def _identity(self):
136 def _identity(self):
137 self._read()
137 self._read()
138 return self._identity
138 return self._identity
139
139
140 @propertycache
140 @propertycache
141 def _nonnormalset(self):
141 def _nonnormalset(self):
142 nonnorm, otherparents = self._map.nonnormalentries()
142 nonnorm, otherparents = self._map.nonnormalentries()
143 self._otherparentset = otherparents
143 self._otherparentset = otherparents
144 return nonnorm
144 return nonnorm
145
145
146 @propertycache
146 @propertycache
147 def _otherparentset(self):
147 def _otherparentset(self):
148 nonnorm, otherparents = self._map.nonnormalentries()
148 nonnorm, otherparents = self._map.nonnormalentries()
149 self._nonnormalset = nonnorm
149 self._nonnormalset = nonnorm
150 return otherparents
150 return otherparents
151
151
152 @propertycache
152 @propertycache
153 def _filefoldmap(self):
153 def _filefoldmap(self):
154 return self._map.filefoldmap()
154 return self._map.filefoldmap()
155
155
156 @propertycache
156 @propertycache
157 def _dirfoldmap(self):
157 def _dirfoldmap(self):
158 f = {}
158 f = {}
159 normcase = util.normcase
159 normcase = util.normcase
160 for name in self._dirs:
160 for name in self._dirs:
161 f[normcase(name)] = name
161 f[normcase(name)] = name
162 return f
162 return f
163
163
164 @property
164 @property
165 def _sparsematcher(self):
165 def _sparsematcher(self):
166 """The matcher for the sparse checkout.
166 """The matcher for the sparse checkout.
167
167
168 The working directory may not include every file from a manifest. The
168 The working directory may not include every file from a manifest. The
169 matcher obtained by this property will match a path if it is to be
169 matcher obtained by this property will match a path if it is to be
170 included in the working directory.
170 included in the working directory.
171 """
171 """
172 # TODO there is potential to cache this property. For now, the matcher
172 # TODO there is potential to cache this property. For now, the matcher
173 # is resolved on every access. (But the called function does use a
173 # is resolved on every access. (But the called function does use a
174 # cache to keep the lookup fast.)
174 # cache to keep the lookup fast.)
175 return self._sparsematchfn()
175 return self._sparsematchfn()
176
176
177 @repocache('branch')
177 @repocache('branch')
178 def _branch(self):
178 def _branch(self):
179 try:
179 try:
180 return self._opener.read("branch").strip() or "default"
180 return self._opener.read("branch").strip() or "default"
181 except IOError as inst:
181 except IOError as inst:
182 if inst.errno != errno.ENOENT:
182 if inst.errno != errno.ENOENT:
183 raise
183 raise
184 return "default"
184 return "default"
185
185
186 @property
186 @property
187 def _pl(self):
187 def _pl(self):
188 return self._map.parents()
188 return self._map.parents()
189
189
190 @propertycache
190 @propertycache
191 def _dirs(self):
191 def _dirs(self):
192 return self._map.dirs()
192 return self._map.dirs()
193
193
194 def dirs(self):
194 def dirs(self):
195 return self._dirs
195 return self._dirs
196
196
197 @rootcache('.hgignore')
197 @rootcache('.hgignore')
198 def _ignore(self):
198 def _ignore(self):
199 files = self._ignorefiles()
199 files = self._ignorefiles()
200 if not files:
200 if not files:
201 return matchmod.never(self._root, '')
201 return matchmod.never(self._root, '')
202
202
203 pats = ['include:%s' % f for f in files]
203 pats = ['include:%s' % f for f in files]
204 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
204 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
205
205
206 @propertycache
206 @propertycache
207 def _slash(self):
207 def _slash(self):
208 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
208 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
209
209
210 @propertycache
210 @propertycache
211 def _checklink(self):
211 def _checklink(self):
212 return util.checklink(self._root)
212 return util.checklink(self._root)
213
213
214 @propertycache
214 @propertycache
215 def _checkexec(self):
215 def _checkexec(self):
216 return util.checkexec(self._root)
216 return util.checkexec(self._root)
217
217
218 @propertycache
218 @propertycache
219 def _checkcase(self):
219 def _checkcase(self):
220 return not util.fscasesensitive(self._join('.hg'))
220 return not util.fscasesensitive(self._join('.hg'))
221
221
222 def _join(self, f):
222 def _join(self, f):
223 # much faster than os.path.join()
223 # much faster than os.path.join()
224 # it's safe because f is always a relative path
224 # it's safe because f is always a relative path
225 return self._rootdir + f
225 return self._rootdir + f
226
226
227 def flagfunc(self, buildfallback):
227 def flagfunc(self, buildfallback):
228 if self._checklink and self._checkexec:
228 if self._checklink and self._checkexec:
229 def f(x):
229 def f(x):
230 try:
230 try:
231 st = os.lstat(self._join(x))
231 st = os.lstat(self._join(x))
232 if util.statislink(st):
232 if util.statislink(st):
233 return 'l'
233 return 'l'
234 if util.statisexec(st):
234 if util.statisexec(st):
235 return 'x'
235 return 'x'
236 except OSError:
236 except OSError:
237 pass
237 pass
238 return ''
238 return ''
239 return f
239 return f
240
240
241 fallback = buildfallback()
241 fallback = buildfallback()
242 if self._checklink:
242 if self._checklink:
243 def f(x):
243 def f(x):
244 if os.path.islink(self._join(x)):
244 if os.path.islink(self._join(x)):
245 return 'l'
245 return 'l'
246 if 'x' in fallback(x):
246 if 'x' in fallback(x):
247 return 'x'
247 return 'x'
248 return ''
248 return ''
249 return f
249 return f
250 if self._checkexec:
250 if self._checkexec:
251 def f(x):
251 def f(x):
252 if 'l' in fallback(x):
252 if 'l' in fallback(x):
253 return 'l'
253 return 'l'
254 if util.isexec(self._join(x)):
254 if util.isexec(self._join(x)):
255 return 'x'
255 return 'x'
256 return ''
256 return ''
257 return f
257 return f
258 else:
258 else:
259 return fallback
259 return fallback
260
260
261 @propertycache
261 @propertycache
262 def _cwd(self):
262 def _cwd(self):
263 # internal config: ui.forcecwd
263 # internal config: ui.forcecwd
264 forcecwd = self._ui.config('ui', 'forcecwd')
264 forcecwd = self._ui.config('ui', 'forcecwd')
265 if forcecwd:
265 if forcecwd:
266 return forcecwd
266 return forcecwd
267 return pycompat.getcwd()
267 return pycompat.getcwd()
268
268
269 def getcwd(self):
269 def getcwd(self):
270 '''Return the path from which a canonical path is calculated.
270 '''Return the path from which a canonical path is calculated.
271
271
272 This path should be used to resolve file patterns or to convert
272 This path should be used to resolve file patterns or to convert
273 canonical paths back to file paths for display. It shouldn't be
273 canonical paths back to file paths for display. It shouldn't be
274 used to get real file paths. Use vfs functions instead.
274 used to get real file paths. Use vfs functions instead.
275 '''
275 '''
276 cwd = self._cwd
276 cwd = self._cwd
277 if cwd == self._root:
277 if cwd == self._root:
278 return ''
278 return ''
279 # self._root ends with a path separator if self._root is '/' or 'C:\'
279 # self._root ends with a path separator if self._root is '/' or 'C:\'
280 rootsep = self._root
280 rootsep = self._root
281 if not util.endswithsep(rootsep):
281 if not util.endswithsep(rootsep):
282 rootsep += pycompat.ossep
282 rootsep += pycompat.ossep
283 if cwd.startswith(rootsep):
283 if cwd.startswith(rootsep):
284 return cwd[len(rootsep):]
284 return cwd[len(rootsep):]
285 else:
285 else:
286 # we're outside the repo. return an absolute path.
286 # we're outside the repo. return an absolute path.
287 return cwd
287 return cwd
288
288
289 def pathto(self, f, cwd=None):
289 def pathto(self, f, cwd=None):
290 if cwd is None:
290 if cwd is None:
291 cwd = self.getcwd()
291 cwd = self.getcwd()
292 path = util.pathto(self._root, cwd, f)
292 path = util.pathto(self._root, cwd, f)
293 if self._slash:
293 if self._slash:
294 return util.pconvert(path)
294 return util.pconvert(path)
295 return path
295 return path
296
296
297 def __getitem__(self, key):
297 def __getitem__(self, key):
298 '''Return the current state of key (a filename) in the dirstate.
298 '''Return the current state of key (a filename) in the dirstate.
299
299
300 States are:
300 States are:
301 n normal
301 n normal
302 m needs merging
302 m needs merging
303 r marked for removal
303 r marked for removal
304 a marked for addition
304 a marked for addition
305 ? not tracked
305 ? not tracked
306 '''
306 '''
307 return self._map.get(key, ("?",))[0]
307 return self._map.get(key, ("?",))[0]
308
308
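A short sketch of the mapping-style interface above, assuming a repo object is in scope:

ds = repo.dirstate
ds['mercurial/dirstate.py']      # one of 'n', 'm', 'r', 'a', '?'
'mercurial/dirstate.py' in ds    # membership check against the same map
[f for f in ds]                  # iteration yields known filenames, already sorted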
309 def __contains__(self, key):
309 def __contains__(self, key):
310 return key in self._map
310 return key in self._map
311
311
312 def __iter__(self):
312 def __iter__(self):
313 return iter(sorted(self._map))
313 return iter(sorted(self._map))
314
314
315 def items(self):
315 def items(self):
316 return self._map.iteritems()
316 return self._map.iteritems()
317
317
318 iteritems = items
318 iteritems = items
319
319
320 def parents(self):
320 def parents(self):
321 return [self._validate(p) for p in self._pl]
321 return [self._validate(p) for p in self._pl]
322
322
323 def p1(self):
323 def p1(self):
324 return self._validate(self._pl[0])
324 return self._validate(self._pl[0])
325
325
326 def p2(self):
326 def p2(self):
327 return self._validate(self._pl[1])
327 return self._validate(self._pl[1])
328
328
329 def branch(self):
329 def branch(self):
330 return encoding.tolocal(self._branch)
330 return encoding.tolocal(self._branch)
331
331
332 def setparents(self, p1, p2=nullid):
332 def setparents(self, p1, p2=nullid):
333 """Set dirstate parents to p1 and p2.
333 """Set dirstate parents to p1 and p2.
334
334
335 When moving from two parents to one, 'm' merged entries a
335 When moving from two parents to one, 'm' merged entries a
336 adjusted to normal and previous copy records discarded and
336 adjusted to normal and previous copy records discarded and
337 returned by the call.
337 returned by the call.
338
338
339 See localrepo.setparents()
339 See localrepo.setparents()
340 """
340 """
341 if self._parentwriters == 0:
341 if self._parentwriters == 0:
342 raise ValueError("cannot set dirstate parent without "
342 raise ValueError("cannot set dirstate parent without "
343 "calling dirstate.beginparentchange")
343 "calling dirstate.beginparentchange")
344
344
345 self._dirty = True
345 self._dirty = True
346 oldp2 = self._pl[1]
346 oldp2 = self._pl[1]
347 if self._origpl is None:
347 if self._origpl is None:
348 self._origpl = self._pl
348 self._origpl = self._pl
349 self._map.setparents(p1, p2)
349 self._map.setparents(p1, p2)
350 copies = {}
350 copies = {}
351 if oldp2 != nullid and p2 == nullid:
351 if oldp2 != nullid and p2 == nullid:
352 candidatefiles = self._nonnormalset.union(self._otherparentset)
352 candidatefiles = self._nonnormalset.union(self._otherparentset)
353 for f in candidatefiles:
353 for f in candidatefiles:
354 s = self._map.get(f)
354 s = self._map.get(f)
355 if s is None:
355 if s is None:
356 continue
356 continue
357
357
358 # Discard 'm' markers when moving away from a merge state
358 # Discard 'm' markers when moving away from a merge state
359 if s[0] == 'm':
359 if s[0] == 'm':
360 source = self._map.copymap.get(f)
360 source = self._map.copymap.get(f)
361 if source:
361 if source:
362 copies[f] = source
362 copies[f] = source
363 self.normallookup(f)
363 self.normallookup(f)
364 # Also fix up otherparent markers
364 # Also fix up otherparent markers
365 elif s[0] == 'n' and s[2] == -2:
365 elif s[0] == 'n' and s[2] == -2:
366 source = self._map.copymap.get(f)
366 source = self._map.copymap.get(f)
367 if source:
367 if source:
368 copies[f] = source
368 copies[f] = source
369 self.add(f)
369 self.add(f)
370 return copies
370 return copies
371
371
372 def setbranch(self, branch):
372 def setbranch(self, branch):
373 self._branch = encoding.fromlocal(branch)
373 self._branch = encoding.fromlocal(branch)
374 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
374 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
375 try:
375 try:
376 f.write(self._branch + '\n')
376 f.write(self._branch + '\n')
377 f.close()
377 f.close()
378
378
379 # make sure filecache has the correct stat info for _branch after
379 # make sure filecache has the correct stat info for _branch after
380 # replacing the underlying file
380 # replacing the underlying file
381 ce = self._filecache['_branch']
381 ce = self._filecache['_branch']
382 if ce:
382 if ce:
383 ce.refresh()
383 ce.refresh()
384 except: # re-raises
384 except: # re-raises
385 f.discard()
385 f.discard()
386 raise
386 raise
387
387
388 def _read(self):
388 def _read(self):
389 self._map = dirstatemap(self._ui, self._opener, self._root)
389 self._map = dirstatemap(self._ui, self._opener, self._root)
390
390
391 # ignore HG_PENDING because identity is used only for writing
391 # ignore HG_PENDING because identity is used only for writing
392 self._identity = util.filestat.frompath(
392 self._identity = util.filestat.frompath(
393 self._opener.join(self._filename))
393 self._opener.join(self._filename))
394 try:
394 try:
395 fp = self._map._opendirstatefile()
395 fp = self._map._opendirstatefile()
396 try:
396 try:
397 st = fp.read()
397 st = fp.read()
398 finally:
398 finally:
399 fp.close()
399 fp.close()
400 except IOError as err:
400 except IOError as err:
401 if err.errno != errno.ENOENT:
401 if err.errno != errno.ENOENT:
402 raise
402 raise
403 return
403 return
404 if not st:
404 if not st:
405 return
405 return
406
406
407 if util.safehasattr(parsers, 'dict_new_presized'):
407 if util.safehasattr(parsers, 'dict_new_presized'):
408 # Make an estimate of the number of files in the dirstate based on
408 # Make an estimate of the number of files in the dirstate based on
409 # its size. From a linear regression on a set of real-world repos,
409 # its size. From a linear regression on a set of real-world repos,
410 # all over 10,000 files, the size of a dirstate entry is 85
410 # all over 10,000 files, the size of a dirstate entry is 85
411 # bytes. The cost of resizing is significantly higher than the cost
411 # bytes. The cost of resizing is significantly higher than the cost
412 # of filling in a larger presized dict, so subtract 20% from the
412 # of filling in a larger presized dict, so subtract 20% from the
413 # size.
413 # size.
414 #
414 #
415 # This heuristic is imperfect in many ways, so in a future dirstate
415 # This heuristic is imperfect in many ways, so in a future dirstate
416 # format update it makes sense to just record the number of entries
416 # format update it makes sense to just record the number of entries
417 # on write.
417 # on write.
418 self._map._map = parsers.dict_new_presized(len(st) / 71)
418 self._map._map = parsers.dict_new_presized(len(st) / 71)
419
419
420 # Python's garbage collector triggers a GC each time a certain number
420 # Python's garbage collector triggers a GC each time a certain number
421 # of container objects (the number being defined by
421 # of container objects (the number being defined by
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
423 # for each file in the dirstate. The C version then immediately marks
423 # for each file in the dirstate. The C version then immediately marks
424 # them as not to be tracked by the collector. However, this has no
424 # them as not to be tracked by the collector. However, this has no
425 # effect on when GCs are triggered, only on what objects the GC looks
425 # effect on when GCs are triggered, only on what objects the GC looks
426 # into. This means that O(number of files) GCs are unavoidable.
426 # into. This means that O(number of files) GCs are unavoidable.
427 # Depending on when in the process's lifetime the dirstate is parsed,
427 # Depending on when in the process's lifetime the dirstate is parsed,
428 # this can get very expensive. As a workaround, disable GC while
428 # this can get very expensive. As a workaround, disable GC while
429 # parsing the dirstate.
429 # parsing the dirstate.
430 #
430 #
431 # (we cannot decorate the function directly since it is in a C module)
431 # (we cannot decorate the function directly since it is in a C module)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
433 p = parse_dirstate(self._map._map, self._map.copymap, st)
433 p = parse_dirstate(self._map._map, self._map.copymap, st)
434 if not self._map._dirtyparents:
434 if not self._map._dirtyparents:
435 self._map.setparents(*p)
435 self._map.setparents(*p)
436
436
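The presizing heuristic above boils down to a simple bytes-per-entry estimate. A minimal standalone sketch of that estimate (illustration only, not part of dirstate.py; the ~71-byte divisor is the one used in the call above):

    def estimated_entries(raw):
        """Guess how many entries a raw dirstate blob holds, using the same
        ~71 bytes-per-entry figure as the presizing call above."""
        return len(raw) // 71

    raw = b'\x00' * 7100             # stand-in for real dirstate contents
    print(estimated_entries(raw))    # -> 100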
437 def invalidate(self):
437 def invalidate(self):
438 '''Causes the next access to reread the dirstate.
438 '''Causes the next access to reread the dirstate.
439
439
440 This is different from localrepo.invalidatedirstate() because it always
440 This is different from localrepo.invalidatedirstate() because it always
441 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
441 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
442 check whether the dirstate has changed before rereading it.'''
442 check whether the dirstate has changed before rereading it.'''
443
443
444 for a in ("_map", "_identity",
444 for a in ("_map", "_identity",
445 "_filefoldmap", "_dirfoldmap", "_branch",
445 "_filefoldmap", "_dirfoldmap", "_branch",
446 "_dirs", "_ignore", "_nonnormalset",
446 "_dirs", "_ignore", "_nonnormalset",
447 "_otherparentset"):
447 "_otherparentset"):
448 if a in self.__dict__:
448 if a in self.__dict__:
449 delattr(self, a)
449 delattr(self, a)
450 self._lastnormaltime = 0
450 self._lastnormaltime = 0
451 self._dirty = False
451 self._dirty = False
452 self._updatedfiles.clear()
452 self._updatedfiles.clear()
453 self._parentwriters = 0
453 self._parentwriters = 0
454 self._origpl = None
454 self._origpl = None
455
455
456 def copy(self, source, dest):
456 def copy(self, source, dest):
457 """Mark dest as a copy of source. Unmark dest if source is None."""
457 """Mark dest as a copy of source. Unmark dest if source is None."""
458 if source == dest:
458 if source == dest:
459 return
459 return
460 self._dirty = True
460 self._dirty = True
461 if source is not None:
461 if source is not None:
462 self._map.copymap[dest] = source
462 self._map.copymap[dest] = source
463 self._updatedfiles.add(source)
463 self._updatedfiles.add(source)
464 self._updatedfiles.add(dest)
464 self._updatedfiles.add(dest)
465 elif self._map.copymap.pop(dest, None):
465 elif self._map.copymap.pop(dest, None):
466 self._updatedfiles.add(dest)
466 self._updatedfiles.add(dest)
467
467
468 def copied(self, file):
468 def copied(self, file):
469 return self._map.copymap.get(file, None)
469 return self._map.copymap.get(file, None)
470
470
471 def copies(self):
471 def copies(self):
472 return self._map.copymap
472 return self._map.copymap
473
473
474 def _droppath(self, f):
474 def _droppath(self, f):
475 if self[f] not in "?r" and "_dirs" in self.__dict__:
475 if self[f] not in "?r" and "_dirs" in self.__dict__:
476 self._dirs.delpath(f)
476 self._dirs.delpath(f)
477
477
478 if "_filefoldmap" in self.__dict__:
478 if "_filefoldmap" in self.__dict__:
479 normed = util.normcase(f)
479 normed = util.normcase(f)
480 if normed in self._filefoldmap:
480 if normed in self._filefoldmap:
481 del self._filefoldmap[normed]
481 del self._filefoldmap[normed]
482
482
483 self._updatedfiles.add(f)
483 self._updatedfiles.add(f)
484
484
485 def _addpath(self, f, state, mode, size, mtime):
485 def _addpath(self, f, state, mode, size, mtime):
486 oldstate = self[f]
486 oldstate = self[f]
487 if state == 'a' or oldstate == 'r':
487 if state == 'a' or oldstate == 'r':
488 scmutil.checkfilename(f)
488 scmutil.checkfilename(f)
489 if f in self._dirs:
489 if f in self._dirs:
490 raise error.Abort(_('directory %r already in dirstate') % f)
490 raise error.Abort(_('directory %r already in dirstate') % f)
491 # shadows
491 # shadows
492 for d in util.finddirs(f):
492 for d in util.finddirs(f):
493 if d in self._dirs:
493 if d in self._dirs:
494 break
494 break
495 entry = self._map.get(d)
495 entry = self._map.get(d)
496 if entry is not None and entry[0] != 'r':
496 if entry is not None and entry[0] != 'r':
497 raise error.Abort(
497 raise error.Abort(
498 _('file %r in dirstate clashes with %r') % (d, f))
498 _('file %r in dirstate clashes with %r') % (d, f))
499 if oldstate in "?r" and "_dirs" in self.__dict__:
499 if oldstate in "?r" and "_dirs" in self.__dict__:
500 self._dirs.addpath(f)
500 self._dirs.addpath(f)
501 self._dirty = True
501 self._dirty = True
502 self._updatedfiles.add(f)
502 self._updatedfiles.add(f)
503 self._map[f] = dirstatetuple(state, mode, size, mtime)
503 self._map[f] = dirstatetuple(state, mode, size, mtime)
504 if state != 'n' or mtime == -1:
504 if state != 'n' or mtime == -1:
505 self._nonnormalset.add(f)
505 self._nonnormalset.add(f)
506 if size == -2:
506 if size == -2:
507 self._otherparentset.add(f)
507 self._otherparentset.add(f)
508
508
509 def normal(self, f):
509 def normal(self, f):
510 '''Mark a file normal and clean.'''
510 '''Mark a file normal and clean.'''
511 s = os.lstat(self._join(f))
511 s = os.lstat(self._join(f))
512 mtime = s.st_mtime
512 mtime = s.st_mtime
513 self._addpath(f, 'n', s.st_mode,
513 self._addpath(f, 'n', s.st_mode,
514 s.st_size & _rangemask, mtime & _rangemask)
514 s.st_size & _rangemask, mtime & _rangemask)
515 self._map.copymap.pop(f, None)
515 self._map.copymap.pop(f, None)
516 if f in self._nonnormalset:
516 if f in self._nonnormalset:
517 self._nonnormalset.remove(f)
517 self._nonnormalset.remove(f)
518 if mtime > self._lastnormaltime:
518 if mtime > self._lastnormaltime:
519 # Remember the most recent modification timeslot for status(),
519 # Remember the most recent modification timeslot for status(),
520 # to make sure we won't miss future size-preserving file content
520 # to make sure we won't miss future size-preserving file content
521 # modifications that happen within the same timeslot.
521 # modifications that happen within the same timeslot.
522 self._lastnormaltime = mtime
522 self._lastnormaltime = mtime
523
523
524 def normallookup(self, f):
524 def normallookup(self, f):
525 '''Mark a file normal, but possibly dirty.'''
525 '''Mark a file normal, but possibly dirty.'''
526 if self._pl[1] != nullid:
526 if self._pl[1] != nullid:
527 # if there is a merge going on and the file was either
527 # if there is a merge going on and the file was either
528 # in state 'm' (-1) or coming from other parent (-2) before
528 # in state 'm' (-1) or coming from other parent (-2) before
529 # being removed, restore that state.
529 # being removed, restore that state.
530 entry = self._map.get(f)
530 entry = self._map.get(f)
531 if entry is not None:
531 if entry is not None:
532 if entry[0] == 'r' and entry[2] in (-1, -2):
532 if entry[0] == 'r' and entry[2] in (-1, -2):
533 source = self._map.copymap.get(f)
533 source = self._map.copymap.get(f)
534 if entry[2] == -1:
534 if entry[2] == -1:
535 self.merge(f)
535 self.merge(f)
536 elif entry[2] == -2:
536 elif entry[2] == -2:
537 self.otherparent(f)
537 self.otherparent(f)
538 if source:
538 if source:
539 self.copy(source, f)
539 self.copy(source, f)
540 return
540 return
541 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
541 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
542 return
542 return
543 self._addpath(f, 'n', 0, -1, -1)
543 self._addpath(f, 'n', 0, -1, -1)
544 self._map.copymap.pop(f, None)
544 self._map.copymap.pop(f, None)
545 if f in self._nonnormalset:
545 if f in self._nonnormalset:
546 self._nonnormalset.remove(f)
546 self._nonnormalset.remove(f)
547
547
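To make the special size values used by normallookup above concrete, here is a tiny self-contained illustration with made-up data (toy tuples, not the real dirstate API): a removed entry remembers whether the file was merged (size -1) or came from the other parent (size -2), and that is what gets restored.

    # Entries are (state, mode, size, mtime) tuples, like dirstatetuple above.
    removed_entries = {
        'plain.txt':  ('r', 0,  0, 0),   # plain removal
        'merged.txt': ('r', 0, -1, 0),   # was in merge state 'm' before removal
        'other.txt':  ('r', 0, -2, 0),   # came from the other parent
    }

    for f, e in sorted(removed_entries.items()):
        if e[0] == 'r' and e[2] == -1:
            print('%s: restore as merged' % f)
        elif e[0] == 'r' and e[2] == -2:
            print('%s: restore as coming from the other parent' % f)
        else:
            print('%s: mark normal but needing a lookup' % f)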
548 def otherparent(self, f):
548 def otherparent(self, f):
549 '''Mark as coming from the other parent, always dirty.'''
549 '''Mark as coming from the other parent, always dirty.'''
550 if self._pl[1] == nullid:
550 if self._pl[1] == nullid:
551 raise error.Abort(_("setting %r to other parent "
551 raise error.Abort(_("setting %r to other parent "
552 "only allowed in merges") % f)
552 "only allowed in merges") % f)
553 if f in self and self[f] == 'n':
553 if f in self and self[f] == 'n':
554 # merge-like
554 # merge-like
555 self._addpath(f, 'm', 0, -2, -1)
555 self._addpath(f, 'm', 0, -2, -1)
556 else:
556 else:
557 # add-like
557 # add-like
558 self._addpath(f, 'n', 0, -2, -1)
558 self._addpath(f, 'n', 0, -2, -1)
559 self._map.copymap.pop(f, None)
559 self._map.copymap.pop(f, None)
560
560
561 def add(self, f):
561 def add(self, f):
562 '''Mark a file added.'''
562 '''Mark a file added.'''
563 self._addpath(f, 'a', 0, -1, -1)
563 self._addpath(f, 'a', 0, -1, -1)
564 self._map.copymap.pop(f, None)
564 self._map.copymap.pop(f, None)
565
565
566 def remove(self, f):
566 def remove(self, f):
567 '''Mark a file removed.'''
567 '''Mark a file removed.'''
568 self._dirty = True
568 self._dirty = True
569 self._droppath(f)
569 self._droppath(f)
570 size = 0
570 size = 0
571 if self._pl[1] != nullid:
571 if self._pl[1] != nullid:
572 entry = self._map.get(f)
572 entry = self._map.get(f)
573 if entry is not None:
573 if entry is not None:
574 # backup the previous state
574 # backup the previous state
575 if entry[0] == 'm': # merge
575 if entry[0] == 'm': # merge
576 size = -1
576 size = -1
577 elif entry[0] == 'n' and entry[2] == -2: # other parent
577 elif entry[0] == 'n' and entry[2] == -2: # other parent
578 size = -2
578 size = -2
579 self._otherparentset.add(f)
579 self._otherparentset.add(f)
580 self._map[f] = dirstatetuple('r', 0, size, 0)
580 self._map[f] = dirstatetuple('r', 0, size, 0)
581 self._nonnormalset.add(f)
581 self._nonnormalset.add(f)
582 if size == 0:
582 if size == 0:
583 self._map.copymap.pop(f, None)
583 self._map.copymap.pop(f, None)
584
584
585 def merge(self, f):
585 def merge(self, f):
586 '''Mark a file merged.'''
586 '''Mark a file merged.'''
587 if self._pl[1] == nullid:
587 if self._pl[1] == nullid:
588 return self.normallookup(f)
588 return self.normallookup(f)
589 return self.otherparent(f)
589 return self.otherparent(f)
590
590
591 def drop(self, f):
591 def drop(self, f):
592 '''Drop a file from the dirstate'''
592 '''Drop a file from the dirstate'''
593 if f in self._map:
593 if f in self._map:
594 self._dirty = True
594 self._dirty = True
595 self._droppath(f)
595 self._droppath(f)
596 del self._map[f]
596 del self._map[f]
597 if f in self._nonnormalset:
597 if f in self._nonnormalset:
598 self._nonnormalset.remove(f)
598 self._nonnormalset.remove(f)
599 self._map.copymap.pop(f, None)
599 self._map.copymap.pop(f, None)
600
600
601 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
601 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
602 if exists is None:
602 if exists is None:
603 exists = os.path.lexists(os.path.join(self._root, path))
603 exists = os.path.lexists(os.path.join(self._root, path))
604 if not exists:
604 if not exists:
605 # Maybe a path component exists
605 # Maybe a path component exists
606 if not ignoremissing and '/' in path:
606 if not ignoremissing and '/' in path:
607 d, f = path.rsplit('/', 1)
607 d, f = path.rsplit('/', 1)
608 d = self._normalize(d, False, ignoremissing, None)
608 d = self._normalize(d, False, ignoremissing, None)
609 folded = d + "/" + f
609 folded = d + "/" + f
610 else:
610 else:
611 # No path components, preserve original case
611 # No path components, preserve original case
612 folded = path
612 folded = path
613 else:
613 else:
614 # recursively normalize leading directory components
614 # recursively normalize leading directory components
615 # against dirstate
615 # against dirstate
616 if '/' in normed:
616 if '/' in normed:
617 d, f = normed.rsplit('/', 1)
617 d, f = normed.rsplit('/', 1)
618 d = self._normalize(d, False, ignoremissing, True)
618 d = self._normalize(d, False, ignoremissing, True)
619 r = self._root + "/" + d
619 r = self._root + "/" + d
620 folded = d + "/" + util.fspath(f, r)
620 folded = d + "/" + util.fspath(f, r)
621 else:
621 else:
622 folded = util.fspath(normed, self._root)
622 folded = util.fspath(normed, self._root)
623 storemap[normed] = folded
623 storemap[normed] = folded
624
624
625 return folded
625 return folded
626
626
627 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
627 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
628 normed = util.normcase(path)
628 normed = util.normcase(path)
629 folded = self._filefoldmap.get(normed, None)
629 folded = self._filefoldmap.get(normed, None)
630 if folded is None:
630 if folded is None:
631 if isknown:
631 if isknown:
632 folded = path
632 folded = path
633 else:
633 else:
634 folded = self._discoverpath(path, normed, ignoremissing, exists,
634 folded = self._discoverpath(path, normed, ignoremissing, exists,
635 self._filefoldmap)
635 self._filefoldmap)
636 return folded
636 return folded
637
637
638 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
638 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
639 normed = util.normcase(path)
639 normed = util.normcase(path)
640 folded = self._filefoldmap.get(normed, None)
640 folded = self._filefoldmap.get(normed, None)
641 if folded is None:
641 if folded is None:
642 folded = self._dirfoldmap.get(normed, None)
642 folded = self._dirfoldmap.get(normed, None)
643 if folded is None:
643 if folded is None:
644 if isknown:
644 if isknown:
645 folded = path
645 folded = path
646 else:
646 else:
647 # store discovered result in dirfoldmap so that future
647 # store discovered result in dirfoldmap so that future
648 # normalizefile calls don't start matching directories
648 # normalizefile calls don't start matching directories
649 folded = self._discoverpath(path, normed, ignoremissing, exists,
649 folded = self._discoverpath(path, normed, ignoremissing, exists,
650 self._dirfoldmap)
650 self._dirfoldmap)
651 return folded
651 return folded
652
652
653 def normalize(self, path, isknown=False, ignoremissing=False):
653 def normalize(self, path, isknown=False, ignoremissing=False):
654 '''
654 '''
655 normalize the case of a pathname when on a casefolding filesystem
655 normalize the case of a pathname when on a casefolding filesystem
656
656
657 isknown specifies whether the filename came from walking the
657 isknown specifies whether the filename came from walking the
658 disk, to avoid extra filesystem access.
658 disk, to avoid extra filesystem access.
659
659
660 If ignoremissing is True, missing paths are returned
660 If ignoremissing is True, missing paths are returned
661 unchanged. Otherwise, we try harder to normalize possibly
661 unchanged. Otherwise, we try harder to normalize possibly
662 existing path components.
662 existing path components.
663
663
664 The normalized case is determined based on the following precedence:
664 The normalized case is determined based on the following precedence:
665
665
666 - version of name already stored in the dirstate
666 - version of name already stored in the dirstate
667 - version of name stored on disk
667 - version of name stored on disk
668 - version provided via command arguments
668 - version provided via command arguments
669 '''
669 '''
670
670
671 if self._checkcase:
671 if self._checkcase:
672 return self._normalize(path, isknown, ignoremissing)
672 return self._normalize(path, isknown, ignoremissing)
673 return path
673 return path
674
674
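On case-folding filesystems the fold maps consulted above cache a mapping from the normalized spelling to the spelling actually stored in the dirstate. A hypothetical, much-simplified sketch of that lookup (the dict contents and helper name are made up; the real code uses util.normcase rather than str.lower):

    # normalized spelling -> spelling stored in the dirstate (made-up data)
    filefoldmap = {'readme.txt': 'README.txt'}

    def normalize_name(path):
        """Return the stored spelling for a known path, else the caller's."""
        return filefoldmap.get(path.lower(), path)

    print(normalize_name('ReadMe.TXT'))     # -> 'README.txt'
    print(normalize_name('new-file.txt'))   # -> 'new-file.txt' (unknown, kept as-is)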
675 def clear(self):
675 def clear(self):
676 self._map = dirstatemap(self._ui, self._opener, self._root)
676 self._map = dirstatemap(self._ui, self._opener, self._root)
677 self._nonnormalset = set()
677 self._nonnormalset = set()
678 self._otherparentset = set()
678 self._otherparentset = set()
679 if "_dirs" in self.__dict__:
679 if "_dirs" in self.__dict__:
680 delattr(self, "_dirs")
680 delattr(self, "_dirs")
681 self._map.setparents(nullid, nullid)
681 self._map.setparents(nullid, nullid)
682 self._lastnormaltime = 0
682 self._lastnormaltime = 0
683 self._updatedfiles.clear()
683 self._updatedfiles.clear()
684 self._dirty = True
684 self._dirty = True
685
685
686 def rebuild(self, parent, allfiles, changedfiles=None):
686 def rebuild(self, parent, allfiles, changedfiles=None):
687 if changedfiles is None:
687 if changedfiles is None:
688 # Rebuild entire dirstate
688 # Rebuild entire dirstate
689 changedfiles = allfiles
689 changedfiles = allfiles
690 lastnormaltime = self._lastnormaltime
690 lastnormaltime = self._lastnormaltime
691 self.clear()
691 self.clear()
692 self._lastnormaltime = lastnormaltime
692 self._lastnormaltime = lastnormaltime
693
693
694 if self._origpl is None:
694 if self._origpl is None:
695 self._origpl = self._pl
695 self._origpl = self._pl
696 self._map.setparents(parent, nullid)
696 self._map.setparents(parent, nullid)
697 for f in changedfiles:
697 for f in changedfiles:
698 if f in allfiles:
698 if f in allfiles:
699 self.normallookup(f)
699 self.normallookup(f)
700 else:
700 else:
701 self.drop(f)
701 self.drop(f)
702
702
703 self._dirty = True
703 self._dirty = True
704
704
705 def identity(self):
705 def identity(self):
706 '''Return identity of the dirstate itself to detect changes in storage
706 '''Return identity of the dirstate itself to detect changes in storage
707
707
708 If the identity of the previous dirstate is equal to this one, writing
708 If the identity of the previous dirstate is equal to this one, writing
709 out changes based on the former dirstate can keep consistency.
709 out changes based on the former dirstate can keep consistency.
710 '''
710 '''
711 return self._identity
711 return self._identity
712
712
713 def write(self, tr):
713 def write(self, tr):
714 if not self._dirty:
714 if not self._dirty:
715 return
715 return
716
716
717 filename = self._filename
717 filename = self._filename
718 if tr:
718 if tr:
719 # 'dirstate.write()' is not only for writing in-memory
719 # 'dirstate.write()' is not only for writing in-memory
720 # changes out, but also for dropping ambiguous timestamps.
720 # changes out, but also for dropping ambiguous timestamps.
721 # delayed writing re-raises the "ambiguous timestamp" issue.
721 # delayed writing re-raises the "ambiguous timestamp" issue.
722 # See also the wiki page below for detail:
722 # See also the wiki page below for detail:
723 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
723 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
724
724
725 # emulate dropping timestamp in 'parsers.pack_dirstate'
725 # emulate dropping timestamp in 'parsers.pack_dirstate'
726 now = _getfsnow(self._opener)
726 now = _getfsnow(self._opener)
727 dmap = self._map
727 dmap = self._map
728 for f in self._updatedfiles:
728 for f in self._updatedfiles:
729 e = dmap.get(f)
729 e = dmap.get(f)
730 if e is not None and e[0] == 'n' and e[3] == now:
730 if e is not None and e[0] == 'n' and e[3] == now:
731 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
731 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
732 self._nonnormalset.add(f)
732 self._nonnormalset.add(f)
733
733
734 # emulate that all 'dirstate.normal' results are written out
734 # emulate that all 'dirstate.normal' results are written out
735 self._lastnormaltime = 0
735 self._lastnormaltime = 0
736 self._updatedfiles.clear()
736 self._updatedfiles.clear()
737
737
738 # delay writing in-memory changes out
738 # delay writing in-memory changes out
739 tr.addfilegenerator('dirstate', (self._filename,),
739 tr.addfilegenerator('dirstate', (self._filename,),
740 self._writedirstate, location='plain')
740 self._writedirstate, location='plain')
741 return
741 return
742
742
743 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
743 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
744 self._writedirstate(st)
744 self._writedirstate(st)
745
745
746 def addparentchangecallback(self, category, callback):
746 def addparentchangecallback(self, category, callback):
747 """add a callback to be called when the wd parents are changed
747 """add a callback to be called when the wd parents are changed
748
748
749 Callback will be called with the following arguments:
749 Callback will be called with the following arguments:
750 dirstate, (oldp1, oldp2), (newp1, newp2)
750 dirstate, (oldp1, oldp2), (newp1, newp2)
751
751
752 Category is a unique identifier to allow overwriting an old callback
752 Category is a unique identifier to allow overwriting an old callback
753 with a newer callback.
753 with a newer callback.
754 """
754 """
755 self._plchangecallbacks[category] = callback
755 self._plchangecallbacks[category] = callback
756
756
757 def _writedirstate(self, st):
757 def _writedirstate(self, st):
758 # notify callbacks about parents change
758 # notify callbacks about parents change
759 if self._origpl is not None and self._origpl != self._pl:
759 if self._origpl is not None and self._origpl != self._pl:
760 for c, callback in sorted(self._plchangecallbacks.iteritems()):
760 for c, callback in sorted(self._plchangecallbacks.iteritems()):
761 callback(self, self._origpl, self._pl)
761 callback(self, self._origpl, self._pl)
762 self._origpl = None
762 self._origpl = None
763 # use the modification time of the newly created temporary file as the
763 # use the modification time of the newly created temporary file as the
764 # filesystem's notion of 'now'
764 # filesystem's notion of 'now'
765 now = util.fstat(st).st_mtime & _rangemask
765 now = util.fstat(st).st_mtime & _rangemask
766
766
767 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
767 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
768 # the timestamp of each entry in the dirstate, by ensuring 'now > mtime'
768 # the timestamp of each entry in the dirstate, by ensuring 'now > mtime'
769 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
769 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
770 if delaywrite > 0:
770 if delaywrite > 0:
771 # do we have any files to delay for?
771 # do we have any files to delay for?
772 for f, e in self._map.iteritems():
772 for f, e in self._map.iteritems():
773 if e[0] == 'n' and e[3] == now:
773 if e[0] == 'n' and e[3] == now:
774 import time # to avoid useless import
774 import time # to avoid useless import
775 # rather than sleep n seconds, sleep until the next
775 # rather than sleep n seconds, sleep until the next
776 # multiple of n seconds
776 # multiple of n seconds
777 clock = time.time()
777 clock = time.time()
778 start = int(clock) - (int(clock) % delaywrite)
778 start = int(clock) - (int(clock) % delaywrite)
779 end = start + delaywrite
779 end = start + delaywrite
780 time.sleep(end - clock)
780 time.sleep(end - clock)
781 now = end # trust our estimate that the end is near now
781 now = end # trust our estimate that the end is near now
782 break
782 break
783
783
784 st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
784 st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
785 self._pl, now))
785 self._pl, now))
786 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
786 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
787 st.close()
787 st.close()
788 self._lastnormaltime = 0
788 self._lastnormaltime = 0
789 self._dirty = self._map._dirtyparents = False
789 self._dirty = self._map._dirtyparents = False
790
790
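The delaywrite loop above does not sleep a flat n seconds; it sleeps until the wall clock reaches the next multiple of n and then treats that boundary as "now". A standalone sketch of just that rounding (illustrative helper, assuming n > 0):

    import time

    def sleep_to_next_multiple(n):
        """Sleep until the wall clock reaches the next multiple of n seconds
        and return that boundary, mirroring the delaywrite loop above."""
        clock = time.time()
        start = int(clock) - (int(clock) % n)
        end = start + n
        time.sleep(end - clock)
        return end

    # called at t=100.3 with n=2, this sleeps ~1.7s and returns 102

The delay itself comes from the 'debug.dirstate.delaywrite' setting read a few lines above (for example, dirstate.delaywrite = 2 under the [debug] section of an hgrc); a non-positive value skips the loop entirely.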
791 def _dirignore(self, f):
791 def _dirignore(self, f):
792 if f == '.':
792 if f == '.':
793 return False
793 return False
794 if self._ignore(f):
794 if self._ignore(f):
795 return True
795 return True
796 for p in util.finddirs(f):
796 for p in util.finddirs(f):
797 if self._ignore(p):
797 if self._ignore(p):
798 return True
798 return True
799 return False
799 return False
800
800
801 def _ignorefiles(self):
801 def _ignorefiles(self):
802 files = []
802 files = []
803 if os.path.exists(self._join('.hgignore')):
803 if os.path.exists(self._join('.hgignore')):
804 files.append(self._join('.hgignore'))
804 files.append(self._join('.hgignore'))
805 for name, path in self._ui.configitems("ui"):
805 for name, path in self._ui.configitems("ui"):
806 if name == 'ignore' or name.startswith('ignore.'):
806 if name == 'ignore' or name.startswith('ignore.'):
807 # we need to use os.path.join here rather than self._join
807 # we need to use os.path.join here rather than self._join
808 # because path is arbitrary and user-specified
808 # because path is arbitrary and user-specified
809 files.append(os.path.join(self._rootdir, util.expandpath(path)))
809 files.append(os.path.join(self._rootdir, util.expandpath(path)))
810 return files
810 return files
811
811
812 def _ignorefileandline(self, f):
812 def _ignorefileandline(self, f):
813 files = collections.deque(self._ignorefiles())
813 files = collections.deque(self._ignorefiles())
814 visited = set()
814 visited = set()
815 while files:
815 while files:
816 i = files.popleft()
816 i = files.popleft()
817 patterns = matchmod.readpatternfile(i, self._ui.warn,
817 patterns = matchmod.readpatternfile(i, self._ui.warn,
818 sourceinfo=True)
818 sourceinfo=True)
819 for pattern, lineno, line in patterns:
819 for pattern, lineno, line in patterns:
820 kind, p = matchmod._patsplit(pattern, 'glob')
820 kind, p = matchmod._patsplit(pattern, 'glob')
821 if kind == "subinclude":
821 if kind == "subinclude":
822 if p not in visited:
822 if p not in visited:
823 files.append(p)
823 files.append(p)
824 continue
824 continue
825 m = matchmod.match(self._root, '', [], [pattern],
825 m = matchmod.match(self._root, '', [], [pattern],
826 warn=self._ui.warn)
826 warn=self._ui.warn)
827 if m(f):
827 if m(f):
828 return (i, lineno, line)
828 return (i, lineno, line)
829 visited.add(i)
829 visited.add(i)
830 return (None, -1, "")
830 return (None, -1, "")
831
831
832 def _walkexplicit(self, match, subrepos):
832 def _walkexplicit(self, match, subrepos):
833 '''Get stat data about the files explicitly specified by match.
833 '''Get stat data about the files explicitly specified by match.
834
834
835 Return a triple (results, dirsfound, dirsnotfound).
835 Return a triple (results, dirsfound, dirsnotfound).
836 - results is a mapping from filename to stat result. It also contains
836 - results is a mapping from filename to stat result. It also contains
837 listings mapping subrepos and .hg to None.
837 listings mapping subrepos and .hg to None.
838 - dirsfound is a list of files found to be directories.
838 - dirsfound is a list of files found to be directories.
839 - dirsnotfound is a list of files that the dirstate thinks are
839 - dirsnotfound is a list of files that the dirstate thinks are
840 directories and that were not found.'''
840 directories and that were not found.'''
841
841
842 def badtype(mode):
842 def badtype(mode):
843 kind = _('unknown')
843 kind = _('unknown')
844 if stat.S_ISCHR(mode):
844 if stat.S_ISCHR(mode):
845 kind = _('character device')
845 kind = _('character device')
846 elif stat.S_ISBLK(mode):
846 elif stat.S_ISBLK(mode):
847 kind = _('block device')
847 kind = _('block device')
848 elif stat.S_ISFIFO(mode):
848 elif stat.S_ISFIFO(mode):
849 kind = _('fifo')
849 kind = _('fifo')
850 elif stat.S_ISSOCK(mode):
850 elif stat.S_ISSOCK(mode):
851 kind = _('socket')
851 kind = _('socket')
852 elif stat.S_ISDIR(mode):
852 elif stat.S_ISDIR(mode):
853 kind = _('directory')
853 kind = _('directory')
854 return _('unsupported file type (type is %s)') % kind
854 return _('unsupported file type (type is %s)') % kind
855
855
856 matchedir = match.explicitdir
856 matchedir = match.explicitdir
857 badfn = match.bad
857 badfn = match.bad
858 dmap = self._map
858 dmap = self._map
859 lstat = os.lstat
859 lstat = os.lstat
860 getkind = stat.S_IFMT
860 getkind = stat.S_IFMT
861 dirkind = stat.S_IFDIR
861 dirkind = stat.S_IFDIR
862 regkind = stat.S_IFREG
862 regkind = stat.S_IFREG
863 lnkkind = stat.S_IFLNK
863 lnkkind = stat.S_IFLNK
864 join = self._join
864 join = self._join
865 dirsfound = []
865 dirsfound = []
866 foundadd = dirsfound.append
866 foundadd = dirsfound.append
867 dirsnotfound = []
867 dirsnotfound = []
868 notfoundadd = dirsnotfound.append
868 notfoundadd = dirsnotfound.append
869
869
870 if not match.isexact() and self._checkcase:
870 if not match.isexact() and self._checkcase:
871 normalize = self._normalize
871 normalize = self._normalize
872 else:
872 else:
873 normalize = None
873 normalize = None
874
874
875 files = sorted(match.files())
875 files = sorted(match.files())
876 subrepos.sort()
876 subrepos.sort()
877 i, j = 0, 0
877 i, j = 0, 0
878 while i < len(files) and j < len(subrepos):
878 while i < len(files) and j < len(subrepos):
879 subpath = subrepos[j] + "/"
879 subpath = subrepos[j] + "/"
880 if files[i] < subpath:
880 if files[i] < subpath:
881 i += 1
881 i += 1
882 continue
882 continue
883 while i < len(files) and files[i].startswith(subpath):
883 while i < len(files) and files[i].startswith(subpath):
884 del files[i]
884 del files[i]
885 j += 1
885 j += 1
886
886
887 if not files or '.' in files:
887 if not files or '.' in files:
888 files = ['.']
888 files = ['.']
889 results = dict.fromkeys(subrepos)
889 results = dict.fromkeys(subrepos)
890 results['.hg'] = None
890 results['.hg'] = None
891
891
892 alldirs = None
892 alldirs = None
893 for ff in files:
893 for ff in files:
894 # constructing the foldmap is expensive, so don't do it for the
894 # constructing the foldmap is expensive, so don't do it for the
895 # common case where files is ['.']
895 # common case where files is ['.']
896 if normalize and ff != '.':
896 if normalize and ff != '.':
897 nf = normalize(ff, False, True)
897 nf = normalize(ff, False, True)
898 else:
898 else:
899 nf = ff
899 nf = ff
900 if nf in results:
900 if nf in results:
901 continue
901 continue
902
902
903 try:
903 try:
904 st = lstat(join(nf))
904 st = lstat(join(nf))
905 kind = getkind(st.st_mode)
905 kind = getkind(st.st_mode)
906 if kind == dirkind:
906 if kind == dirkind:
907 if nf in dmap:
907 if nf in dmap:
908 # file replaced by dir on disk but still in dirstate
908 # file replaced by dir on disk but still in dirstate
909 results[nf] = None
909 results[nf] = None
910 if matchedir:
910 if matchedir:
911 matchedir(nf)
911 matchedir(nf)
912 foundadd((nf, ff))
912 foundadd((nf, ff))
913 elif kind == regkind or kind == lnkkind:
913 elif kind == regkind or kind == lnkkind:
914 results[nf] = st
914 results[nf] = st
915 else:
915 else:
916 badfn(ff, badtype(kind))
916 badfn(ff, badtype(kind))
917 if nf in dmap:
917 if nf in dmap:
918 results[nf] = None
918 results[nf] = None
919 except OSError as inst: # nf not found on disk - it is dirstate only
919 except OSError as inst: # nf not found on disk - it is dirstate only
920 if nf in dmap: # does it exactly match a missing file?
920 if nf in dmap: # does it exactly match a missing file?
921 results[nf] = None
921 results[nf] = None
922 else: # does it match a missing directory?
922 else: # does it match a missing directory?
923 if alldirs is None:
923 if alldirs is None:
924 alldirs = util.dirs(dmap._map)
924 alldirs = util.dirs(dmap._map)
925 if nf in alldirs:
925 if nf in alldirs:
926 if matchedir:
926 if matchedir:
927 matchedir(nf)
927 matchedir(nf)
928 notfoundadd(nf)
928 notfoundadd(nf)
929 else:
929 else:
930 badfn(ff, encoding.strtolocal(inst.strerror))
930 badfn(ff, encoding.strtolocal(inst.strerror))
931
931
932 # Case insensitive filesystems cannot rely on lstat() failing to detect
932 # Case insensitive filesystems cannot rely on lstat() failing to detect
933 # a case-only rename. Prune the stat object for any file that does not
933 # a case-only rename. Prune the stat object for any file that does not
934 # match the case in the filesystem, if there are multiple files that
934 # match the case in the filesystem, if there are multiple files that
935 # normalize to the same path.
935 # normalize to the same path.
936 if match.isexact() and self._checkcase:
936 if match.isexact() and self._checkcase:
937 normed = {}
937 normed = {}
938
938
939 for f, st in results.iteritems():
939 for f, st in results.iteritems():
940 if st is None:
940 if st is None:
941 continue
941 continue
942
942
943 nc = util.normcase(f)
943 nc = util.normcase(f)
944 paths = normed.get(nc)
944 paths = normed.get(nc)
945
945
946 if paths is None:
946 if paths is None:
947 paths = set()
947 paths = set()
948 normed[nc] = paths
948 normed[nc] = paths
949
949
950 paths.add(f)
950 paths.add(f)
951
951
952 for norm, paths in normed.iteritems():
952 for norm, paths in normed.iteritems():
953 if len(paths) > 1:
953 if len(paths) > 1:
954 for path in paths:
954 for path in paths:
955 folded = self._discoverpath(path, norm, True, None,
955 folded = self._discoverpath(path, norm, True, None,
956 self._dirfoldmap)
956 self._dirfoldmap)
957 if path != folded:
957 if path != folded:
958 results[path] = None
958 results[path] = None
959
959
960 return results, dirsfound, dirsnotfound
960 return results, dirsfound, dirsnotfound
961
961
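The i/j merge loop near the top of _walkexplicit drops explicit file arguments that fall inside a subrepo, since those paths are handled through the subrepo entries rather than by this dirstate walk. The same logic, run standalone on toy data:

    files = sorted(['a.txt', 'sub/x', 'sub/y', 'z.txt'])
    subrepos = sorted(['sub'])
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + "/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1
    print(files)   # -> ['a.txt', 'z.txt']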
962 def walk(self, match, subrepos, unknown, ignored, full=True):
962 def walk(self, match, subrepos, unknown, ignored, full=True):
963 '''
963 '''
964 Walk recursively through the directory tree, finding all files
964 Walk recursively through the directory tree, finding all files
965 matched by match.
965 matched by match.
966
966
967 If full is False, maybe skip some known-clean files.
967 If full is False, maybe skip some known-clean files.
968
968
969 Return a dict mapping filename to stat-like object (either
969 Return a dict mapping filename to stat-like object (either
970 mercurial.osutil.stat instance or return value of os.stat()).
970 mercurial.osutil.stat instance or return value of os.stat()).
971
971
972 '''
972 '''
973 # full is a flag that extensions that hook into walk can use -- this
973 # full is a flag that extensions that hook into walk can use -- this
974 # implementation doesn't use it at all. This satisfies the contract
974 # implementation doesn't use it at all. This satisfies the contract
975 # because we only guarantee a "maybe".
975 # because we only guarantee a "maybe".
976
976
977 if ignored:
977 if ignored:
978 ignore = util.never
978 ignore = util.never
979 dirignore = util.never
979 dirignore = util.never
980 elif unknown:
980 elif unknown:
981 ignore = self._ignore
981 ignore = self._ignore
982 dirignore = self._dirignore
982 dirignore = self._dirignore
983 else:
983 else:
984 # if not unknown and not ignored, drop dir recursion and step 2
984 # if not unknown and not ignored, drop dir recursion and step 2
985 ignore = util.always
985 ignore = util.always
986 dirignore = util.always
986 dirignore = util.always
987
987
988 matchfn = match.matchfn
988 matchfn = match.matchfn
989 matchalways = match.always()
989 matchalways = match.always()
990 matchtdir = match.traversedir
990 matchtdir = match.traversedir
991 dmap = self._map
991 dmap = self._map
992 listdir = util.listdir
992 listdir = util.listdir
993 lstat = os.lstat
993 lstat = os.lstat
994 dirkind = stat.S_IFDIR
994 dirkind = stat.S_IFDIR
995 regkind = stat.S_IFREG
995 regkind = stat.S_IFREG
996 lnkkind = stat.S_IFLNK
996 lnkkind = stat.S_IFLNK
997 join = self._join
997 join = self._join
998
998
999 exact = skipstep3 = False
999 exact = skipstep3 = False
1000 if match.isexact(): # match.exact
1000 if match.isexact(): # match.exact
1001 exact = True
1001 exact = True
1002 dirignore = util.always # skip step 2
1002 dirignore = util.always # skip step 2
1003 elif match.prefix(): # match.match, no patterns
1003 elif match.prefix(): # match.match, no patterns
1004 skipstep3 = True
1004 skipstep3 = True
1005
1005
1006 if not exact and self._checkcase:
1006 if not exact and self._checkcase:
1007 normalize = self._normalize
1007 normalize = self._normalize
1008 normalizefile = self._normalizefile
1008 normalizefile = self._normalizefile
1009 skipstep3 = False
1009 skipstep3 = False
1010 else:
1010 else:
1011 normalize = self._normalize
1011 normalize = self._normalize
1012 normalizefile = None
1012 normalizefile = None
1013
1013
1014 # step 1: find all explicit files
1014 # step 1: find all explicit files
1015 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1015 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1016
1016
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1018 work = [d for d in work if not dirignore(d[0])]
1018 work = [d for d in work if not dirignore(d[0])]
1019
1019
1020 # step 2: visit subdirectories
1020 # step 2: visit subdirectories
1021 def traverse(work, alreadynormed):
1021 def traverse(work, alreadynormed):
1022 wadd = work.append
1022 wadd = work.append
1023 while work:
1023 while work:
1024 nd = work.pop()
1024 nd = work.pop()
1025 if not match.visitdir(nd):
1025 if not match.visitdir(nd):
1026 continue
1026 continue
1027 skip = None
1027 skip = None
1028 if nd == '.':
1028 if nd == '.':
1029 nd = ''
1029 nd = ''
1030 else:
1030 else:
1031 skip = '.hg'
1031 skip = '.hg'
1032 try:
1032 try:
1033 entries = listdir(join(nd), stat=True, skip=skip)
1033 entries = listdir(join(nd), stat=True, skip=skip)
1034 except OSError as inst:
1034 except OSError as inst:
1035 if inst.errno in (errno.EACCES, errno.ENOENT):
1035 if inst.errno in (errno.EACCES, errno.ENOENT):
1036 match.bad(self.pathto(nd),
1036 match.bad(self.pathto(nd),
1037 encoding.strtolocal(inst.strerror))
1037 encoding.strtolocal(inst.strerror))
1038 continue
1038 continue
1039 raise
1039 raise
1040 for f, kind, st in entries:
1040 for f, kind, st in entries:
1041 if normalizefile:
1041 if normalizefile:
1042 # even though f might be a directory, we're only
1042 # even though f might be a directory, we're only
1043 # interested in comparing it to files currently in the
1043 # interested in comparing it to files currently in the
1044 # dmap -- therefore normalizefile is enough
1044 # dmap -- therefore normalizefile is enough
1045 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1045 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1046 True)
1046 True)
1047 else:
1047 else:
1048 nf = nd and (nd + "/" + f) or f
1048 nf = nd and (nd + "/" + f) or f
1049 if nf not in results:
1049 if nf not in results:
1050 if kind == dirkind:
1050 if kind == dirkind:
1051 if not ignore(nf):
1051 if not ignore(nf):
1052 if matchtdir:
1052 if matchtdir:
1053 matchtdir(nf)
1053 matchtdir(nf)
1054 wadd(nf)
1054 wadd(nf)
1055 if nf in dmap and (matchalways or matchfn(nf)):
1055 if nf in dmap and (matchalways or matchfn(nf)):
1056 results[nf] = None
1056 results[nf] = None
1057 elif kind == regkind or kind == lnkkind:
1057 elif kind == regkind or kind == lnkkind:
1058 if nf in dmap:
1058 if nf in dmap:
1059 if matchalways or matchfn(nf):
1059 if matchalways or matchfn(nf):
1060 results[nf] = st
1060 results[nf] = st
1061 elif ((matchalways or matchfn(nf))
1061 elif ((matchalways or matchfn(nf))
1062 and not ignore(nf)):
1062 and not ignore(nf)):
1063 # unknown file -- normalize if necessary
1063 # unknown file -- normalize if necessary
1064 if not alreadynormed:
1064 if not alreadynormed:
1065 nf = normalize(nf, False, True)
1065 nf = normalize(nf, False, True)
1066 results[nf] = st
1066 results[nf] = st
1067 elif nf in dmap and (matchalways or matchfn(nf)):
1067 elif nf in dmap and (matchalways or matchfn(nf)):
1068 results[nf] = None
1068 results[nf] = None
1069
1069
1070 for nd, d in work:
1070 for nd, d in work:
1071 # alreadynormed means that processwork doesn't have to do any
1071 # alreadynormed means that processwork doesn't have to do any
1072 # expensive directory normalization
1072 # expensive directory normalization
1073 alreadynormed = not normalize or nd == d
1073 alreadynormed = not normalize or nd == d
1074 traverse([d], alreadynormed)
1074 traverse([d], alreadynormed)
1075
1075
1076 for s in subrepos:
1076 for s in subrepos:
1077 del results[s]
1077 del results[s]
1078 del results['.hg']
1078 del results['.hg']
1079
1079
1080 # step 3: visit remaining files from dmap
1080 # step 3: visit remaining files from dmap
1081 if not skipstep3 and not exact:
1081 if not skipstep3 and not exact:
1082 # If a dmap file is not in results yet, it was either
1082 # If a dmap file is not in results yet, it was either
1083 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1083 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1084 # symlink directory.
1084 # symlink directory.
1085 if not results and matchalways:
1085 if not results and matchalways:
1086 visit = [f for f in dmap]
1086 visit = [f for f in dmap]
1087 else:
1087 else:
1088 visit = [f for f in dmap if f not in results and matchfn(f)]
1088 visit = [f for f in dmap if f not in results and matchfn(f)]
1089 visit.sort()
1089 visit.sort()
1090
1090
1091 if unknown:
1091 if unknown:
1092 # unknown == True means we walked all dirs under the roots
1092 # unknown == True means we walked all dirs under the roots
1093 # that weren't ignored, and everything that matched was stat'ed
1093 # that weren't ignored, and everything that matched was stat'ed
1094 # and is already in results.
1094 # and is already in results.
1095 # The rest must thus be ignored or under a symlink.
1095 # The rest must thus be ignored or under a symlink.
1096 audit_path = pathutil.pathauditor(self._root, cached=True)
1096 audit_path = pathutil.pathauditor(self._root, cached=True)
1097
1097
1098 for nf in iter(visit):
1098 for nf in iter(visit):
1099 # If a stat for the same file was already added with a
1099 # If a stat for the same file was already added with a
1100 # different case, don't add one for this, since that would
1100 # different case, don't add one for this, since that would
1101 # make it appear as if the file exists under both names
1101 # make it appear as if the file exists under both names
1102 # on disk.
1102 # on disk.
1103 if (normalizefile and
1103 if (normalizefile and
1104 normalizefile(nf, True, True) in results):
1104 normalizefile(nf, True, True) in results):
1105 results[nf] = None
1105 results[nf] = None
1106 # Report ignored items in the dmap as long as they are not
1106 # Report ignored items in the dmap as long as they are not
1107 # under a symlink directory.
1107 # under a symlink directory.
1108 elif audit_path.check(nf):
1108 elif audit_path.check(nf):
1109 try:
1109 try:
1110 results[nf] = lstat(join(nf))
1110 results[nf] = lstat(join(nf))
1111 # file was just ignored, no links, and exists
1111 # file was just ignored, no links, and exists
1112 except OSError:
1112 except OSError:
1113 # file doesn't exist
1113 # file doesn't exist
1114 results[nf] = None
1114 results[nf] = None
1115 else:
1115 else:
1116 # It's either missing or under a symlink directory
1116 # It's either missing or under a symlink directory
1117 # which, in this case, we report as missing
1117 # which, in this case, we report as missing
1118 results[nf] = None
1118 results[nf] = None
1119 else:
1119 else:
1120 # We may not have walked the full directory tree above,
1120 # We may not have walked the full directory tree above,
1121 # so stat and check everything we missed.
1121 # so stat and check everything we missed.
1122 iv = iter(visit)
1122 iv = iter(visit)
1123 for st in util.statfiles([join(i) for i in visit]):
1123 for st in util.statfiles([join(i) for i in visit]):
1124 results[next(iv)] = st
1124 results[next(iv)] = st
1125 return results
1125 return results
1126
1126
1127 def status(self, match, subrepos, ignored, clean, unknown):
1127 def status(self, match, subrepos, ignored, clean, unknown):
1128 '''Determine the status of the working copy relative to the
1128 '''Determine the status of the working copy relative to the
1129 dirstate and return a pair of (unsure, status), where status is of type
1129 dirstate and return a pair of (unsure, status), where status is of type
1130 scmutil.status and:
1130 scmutil.status and:
1131
1131
1132 unsure:
1132 unsure:
1133 files that might have been modified since the dirstate was
1133 files that might have been modified since the dirstate was
1134 written, but need to be read to be sure (size is the same
1134 written, but need to be read to be sure (size is the same
1135 but mtime differs)
1135 but mtime differs)
1136 status.modified:
1136 status.modified:
1137 files that have definitely been modified since the dirstate
1137 files that have definitely been modified since the dirstate
1138 was written (different size or mode)
1138 was written (different size or mode)
1139 status.clean:
1139 status.clean:
1140 files that have definitely not been modified since the
1140 files that have definitely not been modified since the
1141 dirstate was written
1141 dirstate was written
1142 '''
1142 '''
1143 listignored, listclean, listunknown = ignored, clean, unknown
1143 listignored, listclean, listunknown = ignored, clean, unknown
1144 lookup, modified, added, unknown, ignored = [], [], [], [], []
1144 lookup, modified, added, unknown, ignored = [], [], [], [], []
1145 removed, deleted, clean = [], [], []
1145 removed, deleted, clean = [], [], []
1146
1146
1147 dmap = self._map
1147 dmap = self._map
1148 ladd = lookup.append # aka "unsure"
1148 ladd = lookup.append # aka "unsure"
1149 madd = modified.append
1149 madd = modified.append
1150 aadd = added.append
1150 aadd = added.append
1151 uadd = unknown.append
1151 uadd = unknown.append
1152 iadd = ignored.append
1152 iadd = ignored.append
1153 radd = removed.append
1153 radd = removed.append
1154 dadd = deleted.append
1154 dadd = deleted.append
1155 cadd = clean.append
1155 cadd = clean.append
1156 mexact = match.exact
1156 mexact = match.exact
1157 dirignore = self._dirignore
1157 dirignore = self._dirignore
1158 checkexec = self._checkexec
1158 checkexec = self._checkexec
1159 copymap = self._map.copymap
1159 copymap = self._map.copymap
1160 lastnormaltime = self._lastnormaltime
1160 lastnormaltime = self._lastnormaltime
1161
1161
1162 # We need to do full walks when either
1162 # We need to do full walks when either
1163 # - we're listing all clean files, or
1163 # - we're listing all clean files, or
1164 # - match.traversedir does something, because match.traversedir should
1164 # - match.traversedir does something, because match.traversedir should
1165 # be called for every dir in the working dir
1165 # be called for every dir in the working dir
1166 full = listclean or match.traversedir is not None
1166 full = listclean or match.traversedir is not None
1167 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1167 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1168 full=full).iteritems():
1168 full=full).iteritems():
1169 if fn not in dmap:
1169 if fn not in dmap:
1170 if (listignored or mexact(fn)) and dirignore(fn):
1170 if (listignored or mexact(fn)) and dirignore(fn):
1171 if listignored:
1171 if listignored:
1172 iadd(fn)
1172 iadd(fn)
1173 else:
1173 else:
1174 uadd(fn)
1174 uadd(fn)
1175 continue
1175 continue
1176
1176
1177 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1177 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1178 # written like that for performance reasons. dmap[fn] is not a
1178 # written like that for performance reasons. dmap[fn] is not a
1179 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1179 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1180 # opcode has fast paths when the value to be unpacked is a tuple or
1180 # opcode has fast paths when the value to be unpacked is a tuple or
1181 # a list, but falls back to creating a full-fledged iterator in
1181 # a list, but falls back to creating a full-fledged iterator in
1182 # general. That is much slower than simply accessing and storing the
1182 # general. That is much slower than simply accessing and storing the
1183 # tuple members one by one.
1183 # tuple members one by one.
1184 t = dmap[fn]
1184 t = dmap[fn]
1185 state = t[0]
1185 state = t[0]
1186 mode = t[1]
1186 mode = t[1]
1187 size = t[2]
1187 size = t[2]
1188 time = t[3]
1188 time = t[3]
1189
1189
1190 if not st and state in "nma":
1190 if not st and state in "nma":
1191 dadd(fn)
1191 dadd(fn)
1192 elif state == 'n':
1192 elif state == 'n':
1193 if (size >= 0 and
1193 if (size >= 0 and
1194 ((size != st.st_size and size != st.st_size & _rangemask)
1194 ((size != st.st_size and size != st.st_size & _rangemask)
1195 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1195 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1196 or size == -2 # other parent
1196 or size == -2 # other parent
1197 or fn in copymap):
1197 or fn in copymap):
1198 madd(fn)
1198 madd(fn)
1199 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1199 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1200 ladd(fn)
1200 ladd(fn)
1201 elif st.st_mtime == lastnormaltime:
1201 elif st.st_mtime == lastnormaltime:
1202 # fn may have just been marked as normal and it may have
1202 # fn may have just been marked as normal and it may have
1203 # changed in the same second without changing its size.
1203 # changed in the same second without changing its size.
1204 # This can happen if we quickly do multiple commits.
1204 # This can happen if we quickly do multiple commits.
1205 # Force lookup, so we don't miss such a racy file change.
1205 # Force lookup, so we don't miss such a racy file change.
1206 ladd(fn)
1206 ladd(fn)
1207 elif listclean:
1207 elif listclean:
1208 cadd(fn)
1208 cadd(fn)
1209 elif state == 'm':
1209 elif state == 'm':
1210 madd(fn)
1210 madd(fn)
1211 elif state == 'a':
1211 elif state == 'a':
1212 aadd(fn)
1212 aadd(fn)
1213 elif state == 'r':
1213 elif state == 'r':
1214 radd(fn)
1214 radd(fn)
1215
1215
1216 return (lookup, scmutil.status(modified, added, removed, deleted,
1216 return (lookup, scmutil.status(modified, added, removed, deleted,
1217 unknown, ignored, clean))
1217 unknown, ignored, clean))
1218
1218
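For the 'n' branch of the loop above, the "definitely modified" test can be restated as a small standalone helper. This is an illustration rather than Mercurial API: it assumes _rangemask is the 0x7fffffff mask defined near the top of dirstate.py, and it leaves out the copy-map check.

    _rangemask = 0x7fffffff   # 31-bit mask applied to stored sizes and mtimes

    def looks_modified(size, mode, st_size, st_mode, checkexec=True):
        """True if the recorded entry cannot possibly match the stat result."""
        if size < 0:
            return size == -2                      # -2: taken from the other parent
        if size != st_size and size != (st_size & _rangemask):
            return True                            # size changed
        return bool((mode ^ st_mode) & 0o100 and checkexec)   # exec bit flipped

    print(looks_modified(12, 0o644, 12, 0o644))    # False: looks clean
    print(looks_modified(12, 0o644, 13, 0o644))    # True: size differs
    print(looks_modified(12, 0o644, 12, 0o755))    # True: exec bit changed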
1219 def matches(self, match):
1219 def matches(self, match):
1220 '''
1220 '''
1221 return files in the dirstate (in whatever state) filtered by match
1221 return files in the dirstate (in whatever state) filtered by match
1222 '''
1222 '''
1223 dmap = self._map
1223 dmap = self._map
1224 if match.always():
1224 if match.always():
1225 return dmap.keys()
1225 return dmap.keys()
1226 files = match.files()
1226 files = match.files()
1227 if match.isexact():
1227 if match.isexact():
1228 # fast path -- filter the other way around, since typically files is
1228 # fast path -- filter the other way around, since typically files is
1229 # much smaller than dmap
1229 # much smaller than dmap
1230 return [f for f in files if f in dmap]
1230 return [f for f in files if f in dmap]
1231 if match.prefix() and all(fn in dmap for fn in files):
1231 if match.prefix() and all(fn in dmap for fn in files):
1232 # fast path -- all the values are known to be files, so just return
1232 # fast path -- all the values are known to be files, so just return
1233 # that
1233 # that
1234 return list(files)
1234 return list(files)
1235 return [f for f in dmap if match(f)]
1235 return [f for f in dmap if match(f)]
1236
1236
1237 def _actualfilename(self, tr):
1237 def _actualfilename(self, tr):
1238 if tr:
1238 if tr:
1239 return self._pendingfilename
1239 return self._pendingfilename
1240 else:
1240 else:
1241 return self._filename
1241 return self._filename
1242
1242
1243 def savebackup(self, tr, backupname):
1243 def savebackup(self, tr, backupname):
1244 '''Save current dirstate into backup file'''
1244 '''Save current dirstate into backup file'''
1245 filename = self._actualfilename(tr)
1245 filename = self._actualfilename(tr)
1246 assert backupname != filename
1246 assert backupname != filename
1247
1247
1248 # use '_writedirstate' instead of 'write' to write changes certainly,
1248 # use '_writedirstate' instead of 'write' to write changes certainly,
1249 # because the latter omits writing out if transaction is running.
1249 # because the latter omits writing out if transaction is running.
1250 # output file will be used to create backup of dirstate at this point.
1250 # output file will be used to create backup of dirstate at this point.
1251 if self._dirty or not self._opener.exists(filename):
1251 if self._dirty or not self._opener.exists(filename):
1252 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1252 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1253 checkambig=True))
1253 checkambig=True))
1254
1254
1255 if tr:
1255 if tr:
1256 # ensure that subsequent tr.writepending returns True for
1256 # ensure that subsequent tr.writepending returns True for
1257 # changes written out above, even if dirstate is never
1257 # changes written out above, even if dirstate is never
1258 # changed after this
1258 # changed after this
1259 tr.addfilegenerator('dirstate', (self._filename,),
1259 tr.addfilegenerator('dirstate', (self._filename,),
1260 self._writedirstate, location='plain')
1260 self._writedirstate, location='plain')
1261
1261
1262 # ensure that pending file written above is unlinked at
1262 # ensure that pending file written above is unlinked at
1263 # failure, even if tr.writepending isn't invoked until the
1263 # failure, even if tr.writepending isn't invoked until the
1264 # end of this transaction
1264 # end of this transaction
1265 tr.registertmp(filename, location='plain')
1265 tr.registertmp(filename, location='plain')
1266
1266
1267 self._opener.tryunlink(backupname)
1267 self._opener.tryunlink(backupname)
1268 # hardlink backup is okay because _writedirstate is always called
1268 # hardlink backup is okay because _writedirstate is always called
1269 # with an "atomictemp=True" file.
1269 # with an "atomictemp=True" file.
1270 util.copyfile(self._opener.join(filename),
1270 util.copyfile(self._opener.join(filename),
1271 self._opener.join(backupname), hardlink=True)
1271 self._opener.join(backupname), hardlink=True)
1272
1272
1273 def restorebackup(self, tr, backupname):
1273 def restorebackup(self, tr, backupname):
1274 '''Restore dirstate by backup file'''
1274 '''Restore dirstate by backup file'''
1275 # this "invalidate()" prevents "wlock.release()" from writing
1275 # this "invalidate()" prevents "wlock.release()" from writing
1276 # changes of dirstate out after restoring from backup file
1276 # changes of dirstate out after restoring from backup file
1277 self.invalidate()
1277 self.invalidate()
1278 filename = self._actualfilename(tr)
1278 filename = self._actualfilename(tr)
1279 self._opener.rename(backupname, filename, checkambig=True)
1279 self._opener.rename(backupname, filename, checkambig=True)
1280
1280
1281 def clearbackup(self, tr, backupname):
1281 def clearbackup(self, tr, backupname):
1282 '''Clear backup file'''
1282 '''Clear backup file'''
1283 self._opener.unlink(backupname)
1283 self._opener.unlink(backupname)
1284
1284
class dirstatemap(object):
    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        self._map = {}
        self.copymap = {}
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    def iteritems(self):
        return self._map.iteritems()

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __setitem__(self, key, value):
        self._map[key] = value

    def __getitem__(self, key):
        return self._map[key]

    def __delitem__(self, key):
        del self._map[key]

    def keys(self):
        return self._map.keys()

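# Illustration (not part of the Mercurial code above): dirstatemap forwards
# the plain mapping protocol to its internal dict. A hypothetical, hand-built
# map in the filename -> (state, mode, size, mtime) entry layout implied by
# the checks in nonnormalentries() below:

dmap = {
    'a.txt': ('n', 0o644, 12, 1500000000),  # clean, tracked file
    'b.txt': ('a', 0, -1, -1),              # added, not yet committed
}

assert 'a.txt' in dmap
assert dmap.get('missing.txt') is None
assert len(dmap) == 2
assert dmap['b.txt'][0] == 'a'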
    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

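# Illustration (not part of the Mercurial code above): the pure-Python
# fallback classifies entries by state byte, size and mtime. The same logic
# applied to a small hand-built map (values chosen only to hit each branch):

def nonnormalentries(dmap):
    # Entries whose state is not 'n' or whose mtime is unset (-1) are
    # "nonnormal"; a size of -2 marks entries from the other merge parent.
    nonnorm = set()
    otherparent = set()
    for fname, e in dmap.items():
        if e[0] != 'n' or e[3] == -1:
            nonnorm.add(fname)
        if e[0] == 'n' and e[2] == -2:
            otherparent.add(fname)
    return nonnorm, otherparent

sample = {
    'clean.txt': ('n', 0o644, 10, 1500000000),
    'added.txt': ('a', 0, -1, -1),
    'unset.txt': ('n', 0o644, 10, -1),
    'merged.txt': ('n', 0o644, -2, 1500000000),
}
nonnorm, otherparent = nonnormalentries(sample)
assert nonnorm == {'added.txt', 'unset.txt'}
assert otherparent == {'merged.txt'}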
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.'  # prevents useless util.fspath() invocation
        return f

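# Illustration (not part of the Mercurial code above): filefoldmap builds a
# "normalized case -> stored case" lookup for case-insensitive filesystems.
# A simplified standalone version using str.lower() in place of
# util.normcase:

def filefoldmap(dmap):
    folded = {}
    for name, e in dmap.items():
        if e[0] != 'r':  # skip removed entries, as the real code does
            folded[name.lower()] = name
    return folded

sample = {
    'README.txt': ('n', 0o644, 5, 1500000000),
    'old.txt': ('r', 0, 0, 0),
}
fm = filefoldmap(sample)
assert fm.get('readme.txt') == 'README.txt'
assert 'old.txt' not in fm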
    def dirs(self):
        """Returns a set-like object containing all the directories in the
        current dirstate.
        """
        return util.dirs(self._map, 'r')

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

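# Illustration (not part of the Mercurial code above): _opendirstatefile goes
# through txnutil.trypending so readers inside a transaction can see pending
# dirstate data. A rough sketch of the underlying "prefer the .pending copy
# if present" idea, with plain paths and none of the mode-consistency
# bookkeeping above (an assumption-level simplification, not txnutil's actual
# behaviour):

import os

def trypending(path):
    pending = path + '.pending'
    if os.path.exists(pending):
        return open(pending, 'rb'), True
    return open(path, 'rb'), False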
    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                self._parents = st[:20], st[20:40]
            elif l == 0:
                self._parents = [nullid, nullid]
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

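# Illustration (not part of the Mercurial code above): parents() only reads
# the first 40 bytes of the dirstate file, which hold two 20-byte binary node
# ids. A self-contained version of that header parsing, with a zero node
# standing in for mercurial.node.nullid:

nullid = b'\0' * 20

def parseparents(header):
    # header: the first 40 bytes of a dirstate file, or b'' if it is missing.
    if len(header) == 40:
        return header[:20], header[20:40]
    if len(header) == 0:
        return nullid, nullid
    raise ValueError('dirstate header appears damaged')

p1 = b'\x11' * 20
p2 = b'\x22' * 20
assert parseparents(p1 + p2) == (p1, p2)
assert parseparents(b'') == (nullid, nullid)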
    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True