rust-status: use bare hg status fastpath from Python...
Raphaël Gomès
r45017:4d1634e5 default
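This page shows the changeset's diff of mercurial/dirstate.py; the fastpath itself is presumably wired into dirstate.status(), further down the file than the portion reproduced here. As a rough, hypothetical sketch of such a dispatch (the guard conditions and the helper name _rust_status are illustrative, not taken from this diff):

    # illustrative sketch only -- not code quoted from this changeset
    def status(self, match, subrepos, ignored, clean, unknown):
        if rustmod is not None and not (subrepos or ignored):
            # bare `hg status`: let the Rust extension perform the walk
            return self._rust_status(match, bool(clean))
        # otherwise fall back to the pure-Python walk
        ...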
@@ -1,1866 +1,1901 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from .pycompat import delattr
18 from .pycompat import delattr
19
19
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22 from . import (
22 from . import (
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 txnutil,
31 txnutil,
31 util,
32 util,
32 )
33 )
33
34
34 from .interfaces import (
35 from .interfaces import (
35 dirstate as intdirstate,
36 dirstate as intdirstate,
36 util as interfaceutil,
37 util as interfaceutil,
37 )
38 )
38
39
39 parsers = policy.importmod('parsers')
40 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
41 rustmod = policy.importrust('dirstate')
41
42
42 propertycache = util.propertycache
43 propertycache = util.propertycache
43 filecache = scmutil.filecache
44 filecache = scmutil.filecache
44 _rangemask = 0x7FFFFFFF
45 _rangemask = 0x7FFFFFFF
45
46
46 dirstatetuple = parsers.dirstatetuple
47 dirstatetuple = parsers.dirstatetuple
47
48
48
49
49 class repocache(filecache):
50 class repocache(filecache):
50 """filecache for files in .hg/"""
51 """filecache for files in .hg/"""
51
52
52 def join(self, obj, fname):
53 def join(self, obj, fname):
53 return obj._opener.join(fname)
54 return obj._opener.join(fname)
54
55
55
56
56 class rootcache(filecache):
57 class rootcache(filecache):
57 """filecache for files in the repository root"""
58 """filecache for files in the repository root"""
58
59
59 def join(self, obj, fname):
60 def join(self, obj, fname):
60 return obj._join(fname)
61 return obj._join(fname)
61
62
62
63
63 def _getfsnow(vfs):
64 def _getfsnow(vfs):
64 '''Get "now" timestamp on filesystem'''
65 '''Get "now" timestamp on filesystem'''
65 tmpfd, tmpname = vfs.mkstemp()
66 tmpfd, tmpname = vfs.mkstemp()
66 try:
67 try:
67 return os.fstat(tmpfd)[stat.ST_MTIME]
68 return os.fstat(tmpfd)[stat.ST_MTIME]
68 finally:
69 finally:
69 os.close(tmpfd)
70 os.close(tmpfd)
70 vfs.unlink(tmpname)
71 vfs.unlink(tmpname)
71
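_getfsnow() asks the filesystem for "now" (by stat-ing a fresh temporary file) rather than trusting the system clock, since the two can disagree, for example on network filesystems. A minimal illustration of how it is consumed, mirroring the call in write() further down and assuming any vfs-like opener:

    # mirrors the call in dirstate.write(); 'opener' is a vfs-like object
    now = _getfsnow(opener)
    # 'now' is then compared against entry mtimes to clear timestamps that
    # would be ambiguous with this write (see clearambiguoustimes below)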
72
72
73
73 @interfaceutil.implementer(intdirstate.idirstate)
74 @interfaceutil.implementer(intdirstate.idirstate)
74 class dirstate(object):
75 class dirstate(object):
75 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 '''Create a new dirstate object.
77 '''Create a new dirstate object.
77
78
78 opener is an open()-like callable that can be used to open the
79 opener is an open()-like callable that can be used to open the
79 dirstate file; root is the root of the directory tracked by
80 dirstate file; root is the root of the directory tracked by
80 the dirstate.
81 the dirstate.
81 '''
82 '''
82 self._opener = opener
83 self._opener = opener
83 self._validate = validate
84 self._validate = validate
84 self._root = root
85 self._root = root
85 self._sparsematchfn = sparsematchfn
86 self._sparsematchfn = sparsematchfn
86 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is a
87 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is a
87 # UNC path pointing to root share (issue4557)
88 # UNC path pointing to root share (issue4557)
88 self._rootdir = pathutil.normasprefix(root)
89 self._rootdir = pathutil.normasprefix(root)
89 self._dirty = False
90 self._dirty = False
90 self._lastnormaltime = 0
91 self._lastnormaltime = 0
91 self._ui = ui
92 self._ui = ui
92 self._filecache = {}
93 self._filecache = {}
93 self._parentwriters = 0
94 self._parentwriters = 0
94 self._filename = b'dirstate'
95 self._filename = b'dirstate'
95 self._pendingfilename = b'%s.pending' % self._filename
96 self._pendingfilename = b'%s.pending' % self._filename
96 self._plchangecallbacks = {}
97 self._plchangecallbacks = {}
97 self._origpl = None
98 self._origpl = None
98 self._updatedfiles = set()
99 self._updatedfiles = set()
99 self._mapcls = dirstatemap
100 self._mapcls = dirstatemap
100 # Access and cache cwd early, so we don't access it for the first time
101 # Access and cache cwd early, so we don't access it for the first time
101 # after a working-copy update caused it to not exist (accessing it then
102 # after a working-copy update caused it to not exist (accessing it then
102 # raises an exception).
103 # raises an exception).
103 self._cwd
104 self._cwd
104
105
105 @contextlib.contextmanager
106 @contextlib.contextmanager
106 def parentchange(self):
107 def parentchange(self):
107 '''Context manager for handling dirstate parents.
108 '''Context manager for handling dirstate parents.
108
109
109 If an exception occurs in the scope of the context manager,
110 If an exception occurs in the scope of the context manager,
110 the incoherent dirstate won't be written when wlock is
111 the incoherent dirstate won't be written when wlock is
111 released.
112 released.
112 '''
113 '''
113 self._parentwriters += 1
114 self._parentwriters += 1
114 yield
115 yield
115 # Typically we want the "undo" step of a context manager in a
116 # Typically we want the "undo" step of a context manager in a
116 # finally block so it happens even when an exception
117 # finally block so it happens even when an exception
117 # occurs. In this case, however, we only want to decrement
118 # occurs. In this case, however, we only want to decrement
118 # parentwriters if the code in the with statement exits
119 # parentwriters if the code in the with statement exits
119 # normally, so we don't have a try/finally here on purpose.
120 # normally, so we don't have a try/finally here on purpose.
120 self._parentwriters -= 1
121 self._parentwriters -= 1
121
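A short, hypothetical usage of this context manager (assuming an existing repo object; setparents() below raises ValueError when called outside it):

    with repo.dirstate.parentchange():
        repo.dirstate.setparents(newnode)
    # if the body raises, _parentwriters stays non-zero and the incoherent
    # dirstate is not written out when the wlock is released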
122
122 def pendingparentchange(self):
123 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
124 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
125 that modify the dirstate parent.
125 '''
126 '''
126 return self._parentwriters > 0
127 return self._parentwriters > 0
127
128
128 @propertycache
129 @propertycache
129 def _map(self):
130 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = self._mapcls(self._ui, self._opener, self._root)
132 self._map = self._mapcls(self._ui, self._opener, self._root)
132 return self._map
133 return self._map
133
134
134 @property
135 @property
135 def _sparsematcher(self):
136 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
137 """The matcher for the sparse checkout.
137
138
138 The working directory may not include every file from a manifest. The
139 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
140 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
141 included in the working directory.
141 """
142 """
142 # TODO there is potential to cache this property. For now, the matcher
143 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
144 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
145 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
146 return self._sparsematchfn()
146
147
147 @repocache(b'branch')
148 @repocache(b'branch')
148 def _branch(self):
149 def _branch(self):
149 try:
150 try:
150 return self._opener.read(b"branch").strip() or b"default"
151 return self._opener.read(b"branch").strip() or b"default"
151 except IOError as inst:
152 except IOError as inst:
152 if inst.errno != errno.ENOENT:
153 if inst.errno != errno.ENOENT:
153 raise
154 raise
154 return b"default"
155 return b"default"
155
156
156 @property
157 @property
157 def _pl(self):
158 def _pl(self):
158 return self._map.parents()
159 return self._map.parents()
159
160
160 def hasdir(self, d):
161 def hasdir(self, d):
161 return self._map.hastrackeddir(d)
162 return self._map.hastrackeddir(d)
162
163
163 @rootcache(b'.hgignore')
164 @rootcache(b'.hgignore')
164 def _ignore(self):
165 def _ignore(self):
165 files = self._ignorefiles()
166 files = self._ignorefiles()
166 if not files:
167 if not files:
167 return matchmod.never()
168 return matchmod.never()
168
169
169 pats = [b'include:%s' % f for f in files]
170 pats = [b'include:%s' % f for f in files]
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171
172
172 @propertycache
173 @propertycache
173 def _slash(self):
174 def _slash(self):
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175
176
176 @propertycache
177 @propertycache
177 def _checklink(self):
178 def _checklink(self):
178 return util.checklink(self._root)
179 return util.checklink(self._root)
179
180
180 @propertycache
181 @propertycache
181 def _checkexec(self):
182 def _checkexec(self):
182 return util.checkexec(self._root)
183 return util.checkexec(self._root)
183
184
184 @propertycache
185 @propertycache
185 def _checkcase(self):
186 def _checkcase(self):
186 return not util.fscasesensitive(self._join(b'.hg'))
187 return not util.fscasesensitive(self._join(b'.hg'))
187
188
188 def _join(self, f):
189 def _join(self, f):
189 # much faster than os.path.join()
190 # much faster than os.path.join()
190 # it's safe because f is always a relative path
191 # it's safe because f is always a relative path
191 return self._rootdir + f
192 return self._rootdir + f
192
193
193 def flagfunc(self, buildfallback):
194 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
195 if self._checklink and self._checkexec:
195
196
196 def f(x):
197 def f(x):
197 try:
198 try:
198 st = os.lstat(self._join(x))
199 st = os.lstat(self._join(x))
199 if util.statislink(st):
200 if util.statislink(st):
200 return b'l'
201 return b'l'
201 if util.statisexec(st):
202 if util.statisexec(st):
202 return b'x'
203 return b'x'
203 except OSError:
204 except OSError:
204 pass
205 pass
205 return b''
206 return b''
206
207
207 return f
208 return f
208
209
209 fallback = buildfallback()
210 fallback = buildfallback()
210 if self._checklink:
211 if self._checklink:
211
212
212 def f(x):
213 def f(x):
213 if os.path.islink(self._join(x)):
214 if os.path.islink(self._join(x)):
214 return b'l'
215 return b'l'
215 if b'x' in fallback(x):
216 if b'x' in fallback(x):
216 return b'x'
217 return b'x'
217 return b''
218 return b''
218
219
219 return f
220 return f
220 if self._checkexec:
221 if self._checkexec:
221
222
222 def f(x):
223 def f(x):
223 if b'l' in fallback(x):
224 if b'l' in fallback(x):
224 return b'l'
225 return b'l'
225 if util.isexec(self._join(x)):
226 if util.isexec(self._join(x)):
226 return b'x'
227 return b'x'
227 return b''
228 return b''
228
229
229 return f
230 return f
230 else:
231 else:
231 return fallback
232 return fallback
232
233
233 @propertycache
234 @propertycache
234 def _cwd(self):
235 def _cwd(self):
235 # internal config: ui.forcecwd
236 # internal config: ui.forcecwd
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 if forcecwd:
238 if forcecwd:
238 return forcecwd
239 return forcecwd
239 return encoding.getcwd()
240 return encoding.getcwd()
240
241
241 def getcwd(self):
242 def getcwd(self):
242 '''Return the path from which a canonical path is calculated.
243 '''Return the path from which a canonical path is calculated.
243
244
244 This path should be used to resolve file patterns or to convert
245 This path should be used to resolve file patterns or to convert
245 canonical paths back to file paths for display. It shouldn't be
246 canonical paths back to file paths for display. It shouldn't be
246 used to get real file paths. Use vfs functions instead.
247 used to get real file paths. Use vfs functions instead.
247 '''
248 '''
248 cwd = self._cwd
249 cwd = self._cwd
249 if cwd == self._root:
250 if cwd == self._root:
250 return b''
251 return b''
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 rootsep = self._root
253 rootsep = self._root
253 if not util.endswithsep(rootsep):
254 if not util.endswithsep(rootsep):
254 rootsep += pycompat.ossep
255 rootsep += pycompat.ossep
255 if cwd.startswith(rootsep):
256 if cwd.startswith(rootsep):
256 return cwd[len(rootsep) :]
257 return cwd[len(rootsep) :]
257 else:
258 else:
258 # we're outside the repo. return an absolute path.
259 # we're outside the repo. return an absolute path.
259 return cwd
260 return cwd
260
261
261 def pathto(self, f, cwd=None):
262 def pathto(self, f, cwd=None):
262 if cwd is None:
263 if cwd is None:
263 cwd = self.getcwd()
264 cwd = self.getcwd()
264 path = util.pathto(self._root, cwd, f)
265 path = util.pathto(self._root, cwd, f)
265 if self._slash:
266 if self._slash:
266 return util.pconvert(path)
267 return util.pconvert(path)
267 return path
268 return path
268
269
269 def __getitem__(self, key):
270 def __getitem__(self, key):
270 '''Return the current state of key (a filename) in the dirstate.
271 '''Return the current state of key (a filename) in the dirstate.
271
272
272 States are:
273 States are:
273 n normal
274 n normal
274 m needs merging
275 m needs merging
275 r marked for removal
276 r marked for removal
276 a marked for addition
277 a marked for addition
277 ? not tracked
278 ? not tracked
278 '''
279 '''
279 return self._map.get(key, (b"?",))[0]
280 return self._map.get(key, (b"?",))[0]
280
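An illustration of the state codes listed above, with hypothetical file names:

    # repo.dirstate[b'clean.c']     -> b'n'  (normal)
    # repo.dirstate[b'added.c']     -> b'a'  (marked for addition)
    # repo.dirstate[b'removed.c']   -> b'r'  (marked for removal)
    # repo.dirstate[b'conflict.c']  -> b'm'  (needs merging)
    # repo.dirstate[b'untracked.c'] -> b'?'  (not tracked)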
281
281 def __contains__(self, key):
282 def __contains__(self, key):
282 return key in self._map
283 return key in self._map
283
284
284 def __iter__(self):
285 def __iter__(self):
285 return iter(sorted(self._map))
286 return iter(sorted(self._map))
286
287
287 def items(self):
288 def items(self):
288 return pycompat.iteritems(self._map)
289 return pycompat.iteritems(self._map)
289
290
290 iteritems = items
291 iteritems = items
291
292
292 def parents(self):
293 def parents(self):
293 return [self._validate(p) for p in self._pl]
294 return [self._validate(p) for p in self._pl]
294
295
295 def p1(self):
296 def p1(self):
296 return self._validate(self._pl[0])
297 return self._validate(self._pl[0])
297
298
298 def p2(self):
299 def p2(self):
299 return self._validate(self._pl[1])
300 return self._validate(self._pl[1])
300
301
301 def branch(self):
302 def branch(self):
302 return encoding.tolocal(self._branch)
303 return encoding.tolocal(self._branch)
303
304
304 def setparents(self, p1, p2=nullid):
305 def setparents(self, p1, p2=nullid):
305 """Set dirstate parents to p1 and p2.
306 """Set dirstate parents to p1 and p2.
306
307
307 When moving from two parents to one, 'm' merged entries are
308 When moving from two parents to one, 'm' merged entries are
308 adjusted to normal and previous copy records discarded and
309 adjusted to normal and previous copy records discarded and
309 returned by the call.
310 returned by the call.
310
311
311 See localrepo.setparents()
312 See localrepo.setparents()
312 """
313 """
313 if self._parentwriters == 0:
314 if self._parentwriters == 0:
314 raise ValueError(
315 raise ValueError(
315 b"cannot set dirstate parent outside of "
316 b"cannot set dirstate parent outside of "
316 b"dirstate.parentchange context manager"
317 b"dirstate.parentchange context manager"
317 )
318 )
318
319
319 self._dirty = True
320 self._dirty = True
320 oldp2 = self._pl[1]
321 oldp2 = self._pl[1]
321 if self._origpl is None:
322 if self._origpl is None:
322 self._origpl = self._pl
323 self._origpl = self._pl
323 self._map.setparents(p1, p2)
324 self._map.setparents(p1, p2)
324 copies = {}
325 copies = {}
325 if oldp2 != nullid and p2 == nullid:
326 if oldp2 != nullid and p2 == nullid:
326 candidatefiles = self._map.nonnormalset.union(
327 candidatefiles = self._map.nonnormalset.union(
327 self._map.otherparentset
328 self._map.otherparentset
328 )
329 )
329 for f in candidatefiles:
330 for f in candidatefiles:
330 s = self._map.get(f)
331 s = self._map.get(f)
331 if s is None:
332 if s is None:
332 continue
333 continue
333
334
334 # Discard 'm' markers when moving away from a merge state
335 # Discard 'm' markers when moving away from a merge state
335 if s[0] == b'm':
336 if s[0] == b'm':
336 source = self._map.copymap.get(f)
337 source = self._map.copymap.get(f)
337 if source:
338 if source:
338 copies[f] = source
339 copies[f] = source
339 self.normallookup(f)
340 self.normallookup(f)
340 # Also fix up otherparent markers
341 # Also fix up otherparent markers
341 elif s[0] == b'n' and s[2] == -2:
342 elif s[0] == b'n' and s[2] == -2:
342 source = self._map.copymap.get(f)
343 source = self._map.copymap.get(f)
343 if source:
344 if source:
344 copies[f] = source
345 copies[f] = source
345 self.add(f)
346 self.add(f)
346 return copies
347 return copies
347
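A hedged sketch of a typical caller dropping the second parent once a merge is done, modeled on how localrepo.setparents() uses this method (repo and newnode are assumed, not shown here):

    with repo.dirstate.parentchange():
        copies = repo.dirstate.setparents(newnode, nullid)
        # re-record the copy information returned by the call
        for dst, src in copies.items():
            repo.dirstate.copy(src, dst)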
348
348 def setbranch(self, branch):
349 def setbranch(self, branch):
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 try:
352 try:
352 f.write(self._branch + b'\n')
353 f.write(self._branch + b'\n')
353 f.close()
354 f.close()
354
355
355 # make sure filecache has the correct stat info for _branch after
356 # make sure filecache has the correct stat info for _branch after
356 # replacing the underlying file
357 # replacing the underlying file
357 ce = self._filecache[b'_branch']
358 ce = self._filecache[b'_branch']
358 if ce:
359 if ce:
359 ce.refresh()
360 ce.refresh()
360 except: # re-raises
361 except: # re-raises
361 f.discard()
362 f.discard()
362 raise
363 raise
363
364
364 def invalidate(self):
365 def invalidate(self):
365 '''Causes the next access to reread the dirstate.
366 '''Causes the next access to reread the dirstate.
366
367
367 This is different from localrepo.invalidatedirstate() because it always
368 This is different from localrepo.invalidatedirstate() because it always
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 check whether the dirstate has changed before rereading it.'''
370 check whether the dirstate has changed before rereading it.'''
370
371
371 for a in ("_map", "_branch", "_ignore"):
372 for a in ("_map", "_branch", "_ignore"):
372 if a in self.__dict__:
373 if a in self.__dict__:
373 delattr(self, a)
374 delattr(self, a)
374 self._lastnormaltime = 0
375 self._lastnormaltime = 0
375 self._dirty = False
376 self._dirty = False
376 self._updatedfiles.clear()
377 self._updatedfiles.clear()
377 self._parentwriters = 0
378 self._parentwriters = 0
378 self._origpl = None
379 self._origpl = None
379
380
380 def copy(self, source, dest):
381 def copy(self, source, dest):
381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 """Mark dest as a copy of source. Unmark dest if source is None."""
382 if source == dest:
383 if source == dest:
383 return
384 return
384 self._dirty = True
385 self._dirty = True
385 if source is not None:
386 if source is not None:
386 self._map.copymap[dest] = source
387 self._map.copymap[dest] = source
387 self._updatedfiles.add(source)
388 self._updatedfiles.add(source)
388 self._updatedfiles.add(dest)
389 self._updatedfiles.add(dest)
389 elif self._map.copymap.pop(dest, None):
390 elif self._map.copymap.pop(dest, None):
390 self._updatedfiles.add(dest)
391 self._updatedfiles.add(dest)
391
392
392 def copied(self, file):
393 def copied(self, file):
393 return self._map.copymap.get(file, None)
394 return self._map.copymap.get(file, None)
394
395
395 def copies(self):
396 def copies(self):
396 return self._map.copymap
397 return self._map.copymap
397
398
398 def _addpath(self, f, state, mode, size, mtime):
399 def _addpath(self, f, state, mode, size, mtime):
399 oldstate = self[f]
400 oldstate = self[f]
400 if state == b'a' or oldstate == b'r':
401 if state == b'a' or oldstate == b'r':
401 scmutil.checkfilename(f)
402 scmutil.checkfilename(f)
402 if self._map.hastrackeddir(f):
403 if self._map.hastrackeddir(f):
403 raise error.Abort(
404 raise error.Abort(
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 )
406 )
406 # shadows
407 # shadows
407 for d in pathutil.finddirs(f):
408 for d in pathutil.finddirs(f):
408 if self._map.hastrackeddir(d):
409 if self._map.hastrackeddir(d):
409 break
410 break
410 entry = self._map.get(d)
411 entry = self._map.get(d)
411 if entry is not None and entry[0] != b'r':
412 if entry is not None and entry[0] != b'r':
412 raise error.Abort(
413 raise error.Abort(
413 _(b'file %r in dirstate clashes with %r')
414 _(b'file %r in dirstate clashes with %r')
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 )
416 )
416 self._dirty = True
417 self._dirty = True
417 self._updatedfiles.add(f)
418 self._updatedfiles.add(f)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
419 self._map.addfile(f, oldstate, state, mode, size, mtime)
419
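Concrete, hypothetical cases that the two checks above reject:

    # adding b'build' aborts if tracked files already live under b'build/'
    #   ('directory %r already in dirstate')
    # adding b'build/out.o' aborts if b'build' is itself tracked as a file
    #   and not marked removed ('file %r in dirstate clashes with %r')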
420
420 def normal(self, f, parentfiledata=None):
421 def normal(self, f, parentfiledata=None):
421 '''Mark a file normal and clean.
422 '''Mark a file normal and clean.
422
423
423 parentfiledata: (mode, size, mtime) of the clean file
424 parentfiledata: (mode, size, mtime) of the clean file
424
425
425 parentfiledata should be computed from memory (for mode,
426 parentfiledata should be computed from memory (for mode,
426 size), at or as close as possible to the point where we
427 size), at or as close as possible to the point where we
427 determined the file was clean, to limit the risk of the
428 determined the file was clean, to limit the risk of the
428 file having been changed by an external process between the
429 file having been changed by an external process between the
429 moment where the file was determined to be clean and now.'''
430 moment where the file was determined to be clean and now.'''
430 if parentfiledata:
431 if parentfiledata:
431 (mode, size, mtime) = parentfiledata
432 (mode, size, mtime) = parentfiledata
432 else:
433 else:
433 s = os.lstat(self._join(f))
434 s = os.lstat(self._join(f))
434 mode = s.st_mode
435 mode = s.st_mode
435 size = s.st_size
436 size = s.st_size
436 mtime = s[stat.ST_MTIME]
437 mtime = s[stat.ST_MTIME]
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._map.copymap.pop(f, None)
439 self._map.copymap.pop(f, None)
439 if f in self._map.nonnormalset:
440 if f in self._map.nonnormalset:
440 self._map.nonnormalset.remove(f)
441 self._map.nonnormalset.remove(f)
441 if mtime > self._lastnormaltime:
442 if mtime > self._lastnormaltime:
442 # Remember the most recent modification timeslot for status(),
443 # Remember the most recent modification timeslot for status(),
443 # to make sure we won't miss future size-preserving file content
444 # to make sure we won't miss future size-preserving file content
444 # modifications that happen within the same timeslot.
445 # modifications that happen within the same timeslot.
445 self._lastnormaltime = mtime
446 self._lastnormaltime = mtime
446
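A hypothetical caller that already holds a fresh stat result can hand it in as parentfiledata, skipping the lstat() in the else-branch above and narrowing the race window the docstring warns about (os and stat imported as in this module):

    st = os.lstat(repo.wjoin(f))
    repo.dirstate.normal(
        f, parentfiledata=(st.st_mode, st.st_size, st[stat.ST_MTIME])
    )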
447
447 def normallookup(self, f):
448 def normallookup(self, f):
448 '''Mark a file normal, but possibly dirty.'''
449 '''Mark a file normal, but possibly dirty.'''
449 if self._pl[1] != nullid:
450 if self._pl[1] != nullid:
450 # if there is a merge going on and the file was either
451 # if there is a merge going on and the file was either
451 # in state 'm' (-1) or coming from other parent (-2) before
452 # in state 'm' (-1) or coming from other parent (-2) before
452 # being removed, restore that state.
453 # being removed, restore that state.
453 entry = self._map.get(f)
454 entry = self._map.get(f)
454 if entry is not None:
455 if entry is not None:
455 if entry[0] == b'r' and entry[2] in (-1, -2):
456 if entry[0] == b'r' and entry[2] in (-1, -2):
456 source = self._map.copymap.get(f)
457 source = self._map.copymap.get(f)
457 if entry[2] == -1:
458 if entry[2] == -1:
458 self.merge(f)
459 self.merge(f)
459 elif entry[2] == -2:
460 elif entry[2] == -2:
460 self.otherparent(f)
461 self.otherparent(f)
461 if source:
462 if source:
462 self.copy(source, f)
463 self.copy(source, f)
463 return
464 return
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 return
466 return
466 self._addpath(f, b'n', 0, -1, -1)
467 self._addpath(f, b'n', 0, -1, -1)
467 self._map.copymap.pop(f, None)
468 self._map.copymap.pop(f, None)
468
469
469 def otherparent(self, f):
470 def otherparent(self, f):
470 '''Mark as coming from the other parent, always dirty.'''
471 '''Mark as coming from the other parent, always dirty.'''
471 if self._pl[1] == nullid:
472 if self._pl[1] == nullid:
472 raise error.Abort(
473 raise error.Abort(
473 _(b"setting %r to other parent only allowed in merges") % f
474 _(b"setting %r to other parent only allowed in merges") % f
474 )
475 )
475 if f in self and self[f] == b'n':
476 if f in self and self[f] == b'n':
476 # merge-like
477 # merge-like
477 self._addpath(f, b'm', 0, -2, -1)
478 self._addpath(f, b'm', 0, -2, -1)
478 else:
479 else:
479 # add-like
480 # add-like
480 self._addpath(f, b'n', 0, -2, -1)
481 self._addpath(f, b'n', 0, -2, -1)
481 self._map.copymap.pop(f, None)
482 self._map.copymap.pop(f, None)
482
483
483 def add(self, f):
484 def add(self, f):
484 '''Mark a file added.'''
485 '''Mark a file added.'''
485 self._addpath(f, b'a', 0, -1, -1)
486 self._addpath(f, b'a', 0, -1, -1)
486 self._map.copymap.pop(f, None)
487 self._map.copymap.pop(f, None)
487
488
488 def remove(self, f):
489 def remove(self, f):
489 '''Mark a file removed.'''
490 '''Mark a file removed.'''
490 self._dirty = True
491 self._dirty = True
491 oldstate = self[f]
492 oldstate = self[f]
492 size = 0
493 size = 0
493 if self._pl[1] != nullid:
494 if self._pl[1] != nullid:
494 entry = self._map.get(f)
495 entry = self._map.get(f)
495 if entry is not None:
496 if entry is not None:
496 # backup the previous state
497 # backup the previous state
497 if entry[0] == b'm': # merge
498 if entry[0] == b'm': # merge
498 size = -1
499 size = -1
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 size = -2
501 size = -2
501 self._map.otherparentset.add(f)
502 self._map.otherparentset.add(f)
502 self._updatedfiles.add(f)
503 self._updatedfiles.add(f)
503 self._map.removefile(f, oldstate, size)
504 self._map.removefile(f, oldstate, size)
504 if size == 0:
505 if size == 0:
505 self._map.copymap.pop(f, None)
506 self._map.copymap.pop(f, None)
506
507
507 def merge(self, f):
508 def merge(self, f):
508 '''Mark a file merged.'''
509 '''Mark a file merged.'''
509 if self._pl[1] == nullid:
510 if self._pl[1] == nullid:
510 return self.normallookup(f)
511 return self.normallookup(f)
511 return self.otherparent(f)
512 return self.otherparent(f)
512
513
513 def drop(self, f):
514 def drop(self, f):
514 '''Drop a file from the dirstate'''
515 '''Drop a file from the dirstate'''
515 oldstate = self[f]
516 oldstate = self[f]
516 if self._map.dropfile(f, oldstate):
517 if self._map.dropfile(f, oldstate):
517 self._dirty = True
518 self._dirty = True
518 self._updatedfiles.add(f)
519 self._updatedfiles.add(f)
519 self._map.copymap.pop(f, None)
520 self._map.copymap.pop(f, None)
520
521
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 if exists is None:
523 if exists is None:
523 exists = os.path.lexists(os.path.join(self._root, path))
524 exists = os.path.lexists(os.path.join(self._root, path))
524 if not exists:
525 if not exists:
525 # Maybe a path component exists
526 # Maybe a path component exists
526 if not ignoremissing and b'/' in path:
527 if not ignoremissing and b'/' in path:
527 d, f = path.rsplit(b'/', 1)
528 d, f = path.rsplit(b'/', 1)
528 d = self._normalize(d, False, ignoremissing, None)
529 d = self._normalize(d, False, ignoremissing, None)
529 folded = d + b"/" + f
530 folded = d + b"/" + f
530 else:
531 else:
531 # No path components, preserve original case
532 # No path components, preserve original case
532 folded = path
533 folded = path
533 else:
534 else:
534 # recursively normalize leading directory components
535 # recursively normalize leading directory components
535 # against dirstate
536 # against dirstate
536 if b'/' in normed:
537 if b'/' in normed:
537 d, f = normed.rsplit(b'/', 1)
538 d, f = normed.rsplit(b'/', 1)
538 d = self._normalize(d, False, ignoremissing, True)
539 d = self._normalize(d, False, ignoremissing, True)
539 r = self._root + b"/" + d
540 r = self._root + b"/" + d
540 folded = d + b"/" + util.fspath(f, r)
541 folded = d + b"/" + util.fspath(f, r)
541 else:
542 else:
542 folded = util.fspath(normed, self._root)
543 folded = util.fspath(normed, self._root)
543 storemap[normed] = folded
544 storemap[normed] = folded
544
545
545 return folded
546 return folded
546
547
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 normed = util.normcase(path)
549 normed = util.normcase(path)
549 folded = self._map.filefoldmap.get(normed, None)
550 folded = self._map.filefoldmap.get(normed, None)
550 if folded is None:
551 if folded is None:
551 if isknown:
552 if isknown:
552 folded = path
553 folded = path
553 else:
554 else:
554 folded = self._discoverpath(
555 folded = self._discoverpath(
555 path, normed, ignoremissing, exists, self._map.filefoldmap
556 path, normed, ignoremissing, exists, self._map.filefoldmap
556 )
557 )
557 return folded
558 return folded
558
559
559 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
560 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
560 normed = util.normcase(path)
561 normed = util.normcase(path)
561 folded = self._map.filefoldmap.get(normed, None)
562 folded = self._map.filefoldmap.get(normed, None)
562 if folded is None:
563 if folded is None:
563 folded = self._map.dirfoldmap.get(normed, None)
564 folded = self._map.dirfoldmap.get(normed, None)
564 if folded is None:
565 if folded is None:
565 if isknown:
566 if isknown:
566 folded = path
567 folded = path
567 else:
568 else:
568 # store discovered result in dirfoldmap so that future
569 # store discovered result in dirfoldmap so that future
569 # normalizefile calls don't start matching directories
570 # normalizefile calls don't start matching directories
570 folded = self._discoverpath(
571 folded = self._discoverpath(
571 path, normed, ignoremissing, exists, self._map.dirfoldmap
572 path, normed, ignoremissing, exists, self._map.dirfoldmap
572 )
573 )
573 return folded
574 return folded
574
575
575 def normalize(self, path, isknown=False, ignoremissing=False):
576 def normalize(self, path, isknown=False, ignoremissing=False):
576 '''
577 '''
577 normalize the case of a pathname when on a casefolding filesystem
578 normalize the case of a pathname when on a casefolding filesystem
578
579
579 isknown specifies whether the filename came from walking the
580 isknown specifies whether the filename came from walking the
580 disk, to avoid extra filesystem access.
581 disk, to avoid extra filesystem access.
581
582
582 If ignoremissing is True, missing paths are returned
583 If ignoremissing is True, missing paths are returned
583 unchanged. Otherwise, we try harder to normalize possibly
584 unchanged. Otherwise, we try harder to normalize possibly
584 existing path components.
585 existing path components.
585
586
586 The normalized case is determined based on the following precedence:
587 The normalized case is determined based on the following precedence:
587
588
588 - version of name already stored in the dirstate
589 - version of name already stored in the dirstate
589 - version of name stored on disk
590 - version of name stored on disk
590 - version provided via command arguments
591 - version provided via command arguments
591 '''
592 '''
592
593
593 if self._checkcase:
594 if self._checkcase:
594 return self._normalize(path, isknown, ignoremissing)
595 return self._normalize(path, isknown, ignoremissing)
595 return path
596 return path
596
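An illustration of the precedence above on a case-insensitive filesystem, with hypothetical names:

    # the dirstate tracks b'README.txt' but the user typed b'readme.TXT'
    # repo.dirstate.normalize(b'readme.TXT')  ->  b'README.txt'
    # on a case-sensitive filesystem the argument is returned unchanged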
597
597 def clear(self):
598 def clear(self):
598 self._map.clear()
599 self._map.clear()
599 self._lastnormaltime = 0
600 self._lastnormaltime = 0
600 self._updatedfiles.clear()
601 self._updatedfiles.clear()
601 self._dirty = True
602 self._dirty = True
602
603
603 def rebuild(self, parent, allfiles, changedfiles=None):
604 def rebuild(self, parent, allfiles, changedfiles=None):
604 if changedfiles is None:
605 if changedfiles is None:
605 # Rebuild entire dirstate
606 # Rebuild entire dirstate
606 to_lookup = allfiles
607 to_lookup = allfiles
607 to_drop = []
608 to_drop = []
608 lastnormaltime = self._lastnormaltime
609 lastnormaltime = self._lastnormaltime
609 self.clear()
610 self.clear()
610 self._lastnormaltime = lastnormaltime
611 self._lastnormaltime = lastnormaltime
611 elif len(changedfiles) < 10:
612 elif len(changedfiles) < 10:
612 # Avoid turning allfiles into a set, which can be expensive if it's
613 # Avoid turning allfiles into a set, which can be expensive if it's
613 # large.
614 # large.
614 to_lookup = []
615 to_lookup = []
615 to_drop = []
616 to_drop = []
616 for f in changedfiles:
617 for f in changedfiles:
617 if f in allfiles:
618 if f in allfiles:
618 to_lookup.append(f)
619 to_lookup.append(f)
619 else:
620 else:
620 to_drop.append(f)
621 to_drop.append(f)
621 else:
622 else:
622 changedfilesset = set(changedfiles)
623 changedfilesset = set(changedfiles)
623 to_lookup = changedfilesset & set(allfiles)
624 to_lookup = changedfilesset & set(allfiles)
624 to_drop = changedfilesset - to_lookup
625 to_drop = changedfilesset - to_lookup
625
626
626 if self._origpl is None:
627 if self._origpl is None:
627 self._origpl = self._pl
628 self._origpl = self._pl
628 self._map.setparents(parent, nullid)
629 self._map.setparents(parent, nullid)
629
630
630 for f in to_lookup:
631 for f in to_lookup:
631 self.normallookup(f)
632 self.normallookup(f)
632 for f in to_drop:
633 for f in to_drop:
633 self.drop(f)
634 self.drop(f)
634
635
635 self._dirty = True
636 self._dirty = True
636
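A hedged sketch of a typical caller, loosely modeled on debugrebuilddirstate (ctx is assumed to be the changectx to rebuild against); passing changedfiles=None takes the full-rebuild branch above:

    with repo.wlock():
        repo.dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles=None)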
637
637 def identity(self):
638 def identity(self):
638 '''Return identity of dirstate itself to detect changes in storage
639 '''Return identity of dirstate itself to detect changes in storage
639
640
640 If identity of previous dirstate is equal to this, writing
641 If identity of previous dirstate is equal to this, writing
641 changes based on the former dirstate out can keep consistency.
642 changes based on the former dirstate out can keep consistency.
642 '''
643 '''
643 return self._map.identity
644 return self._map.identity
644
645
645 def write(self, tr):
646 def write(self, tr):
646 if not self._dirty:
647 if not self._dirty:
647 return
648 return
648
649
649 filename = self._filename
650 filename = self._filename
650 if tr:
651 if tr:
651 # 'dirstate.write()' is not only for writing in-memory
652 # 'dirstate.write()' is not only for writing in-memory
652 # changes out, but also for dropping ambiguous timestamps.
653 # changes out, but also for dropping ambiguous timestamps.
653 # delayed writing re-raises the "ambiguous timestamp issue".
654 # delayed writing re-raises the "ambiguous timestamp issue".
654 # See also the wiki page below for detail:
655 # See also the wiki page below for detail:
655 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
656 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
656
657
657 # emulate dropping timestamp in 'parsers.pack_dirstate'
658 # emulate dropping timestamp in 'parsers.pack_dirstate'
658 now = _getfsnow(self._opener)
659 now = _getfsnow(self._opener)
659 self._map.clearambiguoustimes(self._updatedfiles, now)
660 self._map.clearambiguoustimes(self._updatedfiles, now)
660
661
661 # emulate that all 'dirstate.normal' results are written out
662 # emulate that all 'dirstate.normal' results are written out
662 self._lastnormaltime = 0
663 self._lastnormaltime = 0
663 self._updatedfiles.clear()
664 self._updatedfiles.clear()
664
665
665 # delay writing in-memory changes out
666 # delay writing in-memory changes out
666 tr.addfilegenerator(
667 tr.addfilegenerator(
667 b'dirstate',
668 b'dirstate',
668 (self._filename,),
669 (self._filename,),
669 self._writedirstate,
670 self._writedirstate,
670 location=b'plain',
671 location=b'plain',
671 )
672 )
672 return
673 return
673
674
674 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
675 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
675 self._writedirstate(st)
676 self._writedirstate(st)
676
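A brief illustration of the two paths above (tr is an open transaction when one exists):

    repo.dirstate.write(tr)    # deferred via tr.addfilegenerator
    repo.dirstate.write(None)  # written immediately to .hg/dirstate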
677
677 def addparentchangecallback(self, category, callback):
678 def addparentchangecallback(self, category, callback):
678 """add a callback to be called when the wd parents are changed
679 """add a callback to be called when the wd parents are changed
679
680
680 Callback will be called with the following arguments:
681 Callback will be called with the following arguments:
681 dirstate, (oldp1, oldp2), (newp1, newp2)
682 dirstate, (oldp1, oldp2), (newp1, newp2)
682
683
683 Category is a unique identifier to allow overwriting an old callback
684 Category is a unique identifier to allow overwriting an old callback
684 with a newer callback.
685 with a newer callback.
685 """
686 """
686 self._plchangecallbacks[category] = callback
687 self._plchangecallbacks[category] = callback
687
688
688 def _writedirstate(self, st):
689 def _writedirstate(self, st):
689 # notify callbacks about parents change
690 # notify callbacks about parents change
690 if self._origpl is not None and self._origpl != self._pl:
691 if self._origpl is not None and self._origpl != self._pl:
691 for c, callback in sorted(
692 for c, callback in sorted(
692 pycompat.iteritems(self._plchangecallbacks)
693 pycompat.iteritems(self._plchangecallbacks)
693 ):
694 ):
694 callback(self, self._origpl, self._pl)
695 callback(self, self._origpl, self._pl)
695 self._origpl = None
696 self._origpl = None
696 # use the modification time of the newly created temporary file as the
697 # use the modification time of the newly created temporary file as the
697 # filesystem's notion of 'now'
698 # filesystem's notion of 'now'
698 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
699 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
699
700
700 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
701 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
701 # the timestamp of each entry in dirstate, because of 'now > mtime'
702 # the timestamp of each entry in dirstate, because of 'now > mtime'
702 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
703 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
703 if delaywrite > 0:
704 if delaywrite > 0:
704 # do we have any files to delay for?
705 # do we have any files to delay for?
705 for f, e in pycompat.iteritems(self._map):
706 for f, e in pycompat.iteritems(self._map):
706 if e[0] == b'n' and e[3] == now:
707 if e[0] == b'n' and e[3] == now:
707 import time # to avoid useless import
708 import time # to avoid useless import
708
709
709 # rather than sleep n seconds, sleep until the next
710 # rather than sleep n seconds, sleep until the next
710 # multiple of n seconds
711 # multiple of n seconds
711 clock = time.time()
712 clock = time.time()
712 start = int(clock) - (int(clock) % delaywrite)
713 start = int(clock) - (int(clock) % delaywrite)
713 end = start + delaywrite
714 end = start + delaywrite
714 time.sleep(end - clock)
715 time.sleep(end - clock)
715 now = end # trust our estimate that the end is near now
716 now = end # trust our estimate that the end is near now
716 break
717 break
717
718
718 self._map.write(st, now)
719 self._map.write(st, now)
719 self._lastnormaltime = 0
720 self._lastnormaltime = 0
720 self._dirty = False
721 self._dirty = False
721
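A worked example of the delaywrite arithmetic above (values are illustrative):

    # debug.dirstate.delaywrite = 2 and time.time() = 107.3
    # start = int(107.3) - (int(107.3) % 2) = 106
    # end   = start + 2                     = 108
    # time.sleep(108 - 107.3)               -> sleep roughly 0.7s
    # 'now' is then taken to be 108, past the mtime that triggered the delay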
722
722 def _dirignore(self, f):
723 def _dirignore(self, f):
723 if self._ignore(f):
724 if self._ignore(f):
724 return True
725 return True
725 for p in pathutil.finddirs(f):
726 for p in pathutil.finddirs(f):
726 if self._ignore(p):
727 if self._ignore(p):
727 return True
728 return True
728 return False
729 return False
729
730
730 def _ignorefiles(self):
731 def _ignorefiles(self):
731 files = []
732 files = []
732 if os.path.exists(self._join(b'.hgignore')):
733 if os.path.exists(self._join(b'.hgignore')):
733 files.append(self._join(b'.hgignore'))
734 files.append(self._join(b'.hgignore'))
734 for name, path in self._ui.configitems(b"ui"):
735 for name, path in self._ui.configitems(b"ui"):
735 if name == b'ignore' or name.startswith(b'ignore.'):
736 if name == b'ignore' or name.startswith(b'ignore.'):
736 # we need to use os.path.join here rather than self._join
737 # we need to use os.path.join here rather than self._join
737 # because path is arbitrary and user-specified
738 # because path is arbitrary and user-specified
738 files.append(os.path.join(self._rootdir, util.expandpath(path)))
739 files.append(os.path.join(self._rootdir, util.expandpath(path)))
739 return files
740 return files
740
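A hypothetical configuration the loop above would pick up; each value is expanded with util.expandpath() and, if relative, joined to the repository root:

    # [ui]
    # ignore = ~/.hgignore-global
    # ignore.work = ~/work/extra-ignores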
741
741 def _ignorefileandline(self, f):
742 def _ignorefileandline(self, f):
742 files = collections.deque(self._ignorefiles())
743 files = collections.deque(self._ignorefiles())
743 visited = set()
744 visited = set()
744 while files:
745 while files:
745 i = files.popleft()
746 i = files.popleft()
746 patterns = matchmod.readpatternfile(
747 patterns = matchmod.readpatternfile(
747 i, self._ui.warn, sourceinfo=True
748 i, self._ui.warn, sourceinfo=True
748 )
749 )
749 for pattern, lineno, line in patterns:
750 for pattern, lineno, line in patterns:
750 kind, p = matchmod._patsplit(pattern, b'glob')
751 kind, p = matchmod._patsplit(pattern, b'glob')
751 if kind == b"subinclude":
752 if kind == b"subinclude":
752 if p not in visited:
753 if p not in visited:
753 files.append(p)
754 files.append(p)
754 continue
755 continue
755 m = matchmod.match(
756 m = matchmod.match(
756 self._root, b'', [], [pattern], warn=self._ui.warn
757 self._root, b'', [], [pattern], warn=self._ui.warn
757 )
758 )
758 if m(f):
759 if m(f):
759 return (i, lineno, line)
760 return (i, lineno, line)
760 visited.add(i)
761 visited.add(i)
761 return (None, -1, b"")
762 return (None, -1, b"")
762
763
763 def _walkexplicit(self, match, subrepos):
764 def _walkexplicit(self, match, subrepos):
764 '''Get stat data about the files explicitly specified by match.
765 '''Get stat data about the files explicitly specified by match.
765
766
766 Return a triple (results, dirsfound, dirsnotfound).
767 Return a triple (results, dirsfound, dirsnotfound).
767 - results is a mapping from filename to stat result. It also contains
768 - results is a mapping from filename to stat result. It also contains
768 listings mapping subrepos and .hg to None.
769 listings mapping subrepos and .hg to None.
769 - dirsfound is a list of files found to be directories.
770 - dirsfound is a list of files found to be directories.
770 - dirsnotfound is a list of files that the dirstate thinks are
771 - dirsnotfound is a list of files that the dirstate thinks are
771 directories and that were not found.'''
772 directories and that were not found.'''
772
773
773 def badtype(mode):
774 def badtype(mode):
774 kind = _(b'unknown')
775 kind = _(b'unknown')
775 if stat.S_ISCHR(mode):
776 if stat.S_ISCHR(mode):
776 kind = _(b'character device')
777 kind = _(b'character device')
777 elif stat.S_ISBLK(mode):
778 elif stat.S_ISBLK(mode):
778 kind = _(b'block device')
779 kind = _(b'block device')
779 elif stat.S_ISFIFO(mode):
780 elif stat.S_ISFIFO(mode):
780 kind = _(b'fifo')
781 kind = _(b'fifo')
781 elif stat.S_ISSOCK(mode):
782 elif stat.S_ISSOCK(mode):
782 kind = _(b'socket')
783 kind = _(b'socket')
783 elif stat.S_ISDIR(mode):
784 elif stat.S_ISDIR(mode):
784 kind = _(b'directory')
785 kind = _(b'directory')
785 return _(b'unsupported file type (type is %s)') % kind
786 return _(b'unsupported file type (type is %s)') % kind
786
787
787 badfn = match.bad
788 badfn = match.bad
788 dmap = self._map
789 dmap = self._map
789 lstat = os.lstat
790 lstat = os.lstat
790 getkind = stat.S_IFMT
791 getkind = stat.S_IFMT
791 dirkind = stat.S_IFDIR
792 dirkind = stat.S_IFDIR
792 regkind = stat.S_IFREG
793 regkind = stat.S_IFREG
793 lnkkind = stat.S_IFLNK
794 lnkkind = stat.S_IFLNK
794 join = self._join
795 join = self._join
795 dirsfound = []
796 dirsfound = []
796 foundadd = dirsfound.append
797 foundadd = dirsfound.append
797 dirsnotfound = []
798 dirsnotfound = []
798 notfoundadd = dirsnotfound.append
799 notfoundadd = dirsnotfound.append
799
800
800 if not match.isexact() and self._checkcase:
801 if not match.isexact() and self._checkcase:
801 normalize = self._normalize
802 normalize = self._normalize
802 else:
803 else:
803 normalize = None
804 normalize = None
804
805
805 files = sorted(match.files())
806 files = sorted(match.files())
806 subrepos.sort()
807 subrepos.sort()
807 i, j = 0, 0
808 i, j = 0, 0
808 while i < len(files) and j < len(subrepos):
809 while i < len(files) and j < len(subrepos):
809 subpath = subrepos[j] + b"/"
810 subpath = subrepos[j] + b"/"
810 if files[i] < subpath:
811 if files[i] < subpath:
811 i += 1
812 i += 1
812 continue
813 continue
813 while i < len(files) and files[i].startswith(subpath):
814 while i < len(files) and files[i].startswith(subpath):
814 del files[i]
815 del files[i]
815 j += 1
816 j += 1
816
817
817 if not files or b'' in files:
818 if not files or b'' in files:
818 files = [b'']
819 files = [b'']
819 # constructing the foldmap is expensive, so don't do it for the
820 # constructing the foldmap is expensive, so don't do it for the
820 # common case where files is ['']
821 # common case where files is ['']
821 normalize = None
822 normalize = None
822 results = dict.fromkeys(subrepos)
823 results = dict.fromkeys(subrepos)
823 results[b'.hg'] = None
824 results[b'.hg'] = None
824
825
825 for ff in files:
826 for ff in files:
826 if normalize:
827 if normalize:
827 nf = normalize(ff, False, True)
828 nf = normalize(ff, False, True)
828 else:
829 else:
829 nf = ff
830 nf = ff
830 if nf in results:
831 if nf in results:
831 continue
832 continue
832
833
833 try:
834 try:
834 st = lstat(join(nf))
835 st = lstat(join(nf))
835 kind = getkind(st.st_mode)
836 kind = getkind(st.st_mode)
836 if kind == dirkind:
837 if kind == dirkind:
837 if nf in dmap:
838 if nf in dmap:
838 # file replaced by dir on disk but still in dirstate
839 # file replaced by dir on disk but still in dirstate
839 results[nf] = None
840 results[nf] = None
840 foundadd((nf, ff))
841 foundadd((nf, ff))
841 elif kind == regkind or kind == lnkkind:
842 elif kind == regkind or kind == lnkkind:
842 results[nf] = st
843 results[nf] = st
843 else:
844 else:
844 badfn(ff, badtype(kind))
845 badfn(ff, badtype(kind))
845 if nf in dmap:
846 if nf in dmap:
846 results[nf] = None
847 results[nf] = None
847 except OSError as inst: # nf not found on disk - it is dirstate only
848 except OSError as inst: # nf not found on disk - it is dirstate only
848 if nf in dmap: # does it exactly match a missing file?
849 if nf in dmap: # does it exactly match a missing file?
849 results[nf] = None
850 results[nf] = None
850 else: # does it match a missing directory?
851 else: # does it match a missing directory?
851 if self._map.hasdir(nf):
852 if self._map.hasdir(nf):
852 notfoundadd(nf)
853 notfoundadd(nf)
853 else:
854 else:
854 badfn(ff, encoding.strtolocal(inst.strerror))
855 badfn(ff, encoding.strtolocal(inst.strerror))
855
856
856 # match.files() may contain explicitly-specified paths that shouldn't
857 # match.files() may contain explicitly-specified paths that shouldn't
857 # be taken; drop them from the list of files found. dirsfound/notfound
858 # be taken; drop them from the list of files found. dirsfound/notfound
858 # aren't filtered here because they will be tested later.
859 # aren't filtered here because they will be tested later.
859 if match.anypats():
860 if match.anypats():
860 for f in list(results):
861 for f in list(results):
861 if f == b'.hg' or f in subrepos:
862 if f == b'.hg' or f in subrepos:
862 # keep sentinel to disable further out-of-repo walks
863 # keep sentinel to disable further out-of-repo walks
863 continue
864 continue
864 if not match(f):
865 if not match(f):
865 del results[f]
866 del results[f]
866
867
867 # Case insensitive filesystems cannot rely on lstat() failing to detect
868 # Case insensitive filesystems cannot rely on lstat() failing to detect
868 # a case-only rename. Prune the stat object for any file that does not
869 # a case-only rename. Prune the stat object for any file that does not
869 # match the case in the filesystem, if there are multiple files that
870 # match the case in the filesystem, if there are multiple files that
870 # normalize to the same path.
871 # normalize to the same path.
871 if match.isexact() and self._checkcase:
872 if match.isexact() and self._checkcase:
872 normed = {}
873 normed = {}
873
874
874 for f, st in pycompat.iteritems(results):
875 for f, st in pycompat.iteritems(results):
875 if st is None:
876 if st is None:
876 continue
877 continue
877
878
878 nc = util.normcase(f)
879 nc = util.normcase(f)
879 paths = normed.get(nc)
880 paths = normed.get(nc)
880
881
881 if paths is None:
882 if paths is None:
882 paths = set()
883 paths = set()
883 normed[nc] = paths
884 normed[nc] = paths
884
885
885 paths.add(f)
886 paths.add(f)
886
887
887 for norm, paths in pycompat.iteritems(normed):
888 for norm, paths in pycompat.iteritems(normed):
888 if len(paths) > 1:
889 if len(paths) > 1:
889 for path in paths:
890 for path in paths:
890 folded = self._discoverpath(
891 folded = self._discoverpath(
891 path, norm, True, None, self._map.dirfoldmap
892 path, norm, True, None, self._map.dirfoldmap
892 )
893 )
893 if path != folded:
894 if path != folded:
894 results[path] = None
895 results[path] = None
895
896
896 return results, dirsfound, dirsnotfound
897 return results, dirsfound, dirsnotfound
897
898
898 def walk(self, match, subrepos, unknown, ignored, full=True):
899 def walk(self, match, subrepos, unknown, ignored, full=True):
899 '''
900 '''
900 Walk recursively through the directory tree, finding all files
901 Walk recursively through the directory tree, finding all files
901 matched by match.
902 matched by match.
902
903
903 If full is False, maybe skip some known-clean files.
904 If full is False, maybe skip some known-clean files.
904
905
905 Return a dict mapping filename to stat-like object (either
906 Return a dict mapping filename to stat-like object (either
906 mercurial.osutil.stat instance or return value of os.stat()).
907 mercurial.osutil.stat instance or return value of os.stat()).
907
908
908 '''
909 '''
909 # full is a flag that extensions that hook into walk can use -- this
910 # full is a flag that extensions that hook into walk can use -- this
910 # implementation doesn't use it at all. This satisfies the contract
911 # implementation doesn't use it at all. This satisfies the contract
911 # because we only guarantee a "maybe".
912 # because we only guarantee a "maybe".
912
913
913 if ignored:
914 if ignored:
914 ignore = util.never
915 ignore = util.never
915 dirignore = util.never
916 dirignore = util.never
916 elif unknown:
917 elif unknown:
917 ignore = self._ignore
918 ignore = self._ignore
918 dirignore = self._dirignore
919 dirignore = self._dirignore
919 else:
920 else:
920 # if not unknown and not ignored, drop dir recursion and step 2
921 # if not unknown and not ignored, drop dir recursion and step 2
921 ignore = util.always
922 ignore = util.always
922 dirignore = util.always
923 dirignore = util.always
923
924
924 matchfn = match.matchfn
925 matchfn = match.matchfn
925 matchalways = match.always()
926 matchalways = match.always()
926 matchtdir = match.traversedir
927 matchtdir = match.traversedir
927 dmap = self._map
928 dmap = self._map
928 listdir = util.listdir
929 listdir = util.listdir
929 lstat = os.lstat
930 lstat = os.lstat
930 dirkind = stat.S_IFDIR
931 dirkind = stat.S_IFDIR
931 regkind = stat.S_IFREG
932 regkind = stat.S_IFREG
932 lnkkind = stat.S_IFLNK
933 lnkkind = stat.S_IFLNK
933 join = self._join
934 join = self._join
934
935
935 exact = skipstep3 = False
936 exact = skipstep3 = False
936 if match.isexact(): # match.exact
937 if match.isexact(): # match.exact
937 exact = True
938 exact = True
938 dirignore = util.always # skip step 2
939 dirignore = util.always # skip step 2
939 elif match.prefix(): # match.match, no patterns
940 elif match.prefix(): # match.match, no patterns
940 skipstep3 = True
941 skipstep3 = True
941
942
942 if not exact and self._checkcase:
943 if not exact and self._checkcase:
943 normalize = self._normalize
944 normalize = self._normalize
944 normalizefile = self._normalizefile
945 normalizefile = self._normalizefile
945 skipstep3 = False
946 skipstep3 = False
946 else:
947 else:
947 normalize = self._normalize
948 normalize = self._normalize
948 normalizefile = None
949 normalizefile = None
949
950
950 # step 1: find all explicit files
951 # step 1: find all explicit files
951 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
952 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
952 if matchtdir:
953 if matchtdir:
953 for d in work:
954 for d in work:
954 matchtdir(d[0])
955 matchtdir(d[0])
955 for d in dirsnotfound:
956 for d in dirsnotfound:
956 matchtdir(d)
957 matchtdir(d)
957
958
958 skipstep3 = skipstep3 and not (work or dirsnotfound)
959 skipstep3 = skipstep3 and not (work or dirsnotfound)
959 work = [d for d in work if not dirignore(d[0])]
960 work = [d for d in work if not dirignore(d[0])]
960
961
961 # step 2: visit subdirectories
962 # step 2: visit subdirectories
962 def traverse(work, alreadynormed):
963 def traverse(work, alreadynormed):
963 wadd = work.append
964 wadd = work.append
964 while work:
965 while work:
965 tracing.counter('dirstate.walk work', len(work))
966 tracing.counter('dirstate.walk work', len(work))
966 nd = work.pop()
967 nd = work.pop()
967 visitentries = match.visitchildrenset(nd)
968 visitentries = match.visitchildrenset(nd)
968 if not visitentries:
969 if not visitentries:
969 continue
970 continue
970 if visitentries == b'this' or visitentries == b'all':
971 if visitentries == b'this' or visitentries == b'all':
971 visitentries = None
972 visitentries = None
972 skip = None
973 skip = None
973 if nd != b'':
974 if nd != b'':
974 skip = b'.hg'
975 skip = b'.hg'
975 try:
976 try:
976 with tracing.log('dirstate.walk.traverse listdir %s', nd):
977 with tracing.log('dirstate.walk.traverse listdir %s', nd):
977 entries = listdir(join(nd), stat=True, skip=skip)
978 entries = listdir(join(nd), stat=True, skip=skip)
978 except OSError as inst:
979 except OSError as inst:
979 if inst.errno in (errno.EACCES, errno.ENOENT):
980 if inst.errno in (errno.EACCES, errno.ENOENT):
980 match.bad(
981 match.bad(
981 self.pathto(nd), encoding.strtolocal(inst.strerror)
982 self.pathto(nd), encoding.strtolocal(inst.strerror)
982 )
983 )
983 continue
984 continue
984 raise
985 raise
985 for f, kind, st in entries:
986 for f, kind, st in entries:
986 # Some matchers may return files in the visitentries set,
987 # Some matchers may return files in the visitentries set,
987 # instead of 'this', if the matcher explicitly mentions them
988 # instead of 'this', if the matcher explicitly mentions them
988 # and is not an exactmatcher. This is acceptable; we do not
989 # and is not an exactmatcher. This is acceptable; we do not
989 # make any hard assumptions about file-or-directory below
990 # make any hard assumptions about file-or-directory below
990 # based on the presence of `f` in visitentries. If
991 # based on the presence of `f` in visitentries. If
991 # visitchildrenset returned a set, we can always skip the
992 # visitchildrenset returned a set, we can always skip the
992 # entries *not* in the set it provided regardless of whether
993 # entries *not* in the set it provided regardless of whether
993 # they're actually a file or a directory.
994 # they're actually a file or a directory.
994 if visitentries and f not in visitentries:
995 if visitentries and f not in visitentries:
995 continue
996 continue
996 if normalizefile:
997 if normalizefile:
997 # even though f might be a directory, we're only
998 # even though f might be a directory, we're only
998 # interested in comparing it to files currently in the
999 # interested in comparing it to files currently in the
999 # dmap -- therefore normalizefile is enough
1000 # dmap -- therefore normalizefile is enough
1000 nf = normalizefile(
1001 nf = normalizefile(
1001 nd and (nd + b"/" + f) or f, True, True
1002 nd and (nd + b"/" + f) or f, True, True
1002 )
1003 )
1003 else:
1004 else:
1004 nf = nd and (nd + b"/" + f) or f
1005 nf = nd and (nd + b"/" + f) or f
1005 if nf not in results:
1006 if nf not in results:
1006 if kind == dirkind:
1007 if kind == dirkind:
1007 if not ignore(nf):
1008 if not ignore(nf):
1008 if matchtdir:
1009 if matchtdir:
1009 matchtdir(nf)
1010 matchtdir(nf)
1010 wadd(nf)
1011 wadd(nf)
1011 if nf in dmap and (matchalways or matchfn(nf)):
1012 if nf in dmap and (matchalways or matchfn(nf)):
1012 results[nf] = None
1013 results[nf] = None
1013 elif kind == regkind or kind == lnkkind:
1014 elif kind == regkind or kind == lnkkind:
1014 if nf in dmap:
1015 if nf in dmap:
1015 if matchalways or matchfn(nf):
1016 if matchalways or matchfn(nf):
1016 results[nf] = st
1017 results[nf] = st
1017 elif (matchalways or matchfn(nf)) and not ignore(
1018 elif (matchalways or matchfn(nf)) and not ignore(
1018 nf
1019 nf
1019 ):
1020 ):
1020 # unknown file -- normalize if necessary
1021 # unknown file -- normalize if necessary
1021 if not alreadynormed:
1022 if not alreadynormed:
1022 nf = normalize(nf, False, True)
1023 nf = normalize(nf, False, True)
1023 results[nf] = st
1024 results[nf] = st
1024 elif nf in dmap and (matchalways or matchfn(nf)):
1025 elif nf in dmap and (matchalways or matchfn(nf)):
1025 results[nf] = None
1026 results[nf] = None
1026
1027
1027 for nd, d in work:
1028 for nd, d in work:
1028 # alreadynormed means that processwork doesn't have to do any
1029 # alreadynormed means that processwork doesn't have to do any
1029 # expensive directory normalization
1030 # expensive directory normalization
1030 alreadynormed = not normalize or nd == d
1031 alreadynormed = not normalize or nd == d
1031 traverse([d], alreadynormed)
1032 traverse([d], alreadynormed)
1032
1033
1033 for s in subrepos:
1034 for s in subrepos:
1034 del results[s]
1035 del results[s]
1035 del results[b'.hg']
1036 del results[b'.hg']
1036
1037
1037 # step 3: visit remaining files from dmap
1038 # step 3: visit remaining files from dmap
1038 if not skipstep3 and not exact:
1039 if not skipstep3 and not exact:
1039 # If a dmap file is not in results yet, it was either
1040 # If a dmap file is not in results yet, it was either
1040 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1041 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1041 # symlink directory.
1042 # symlink directory.
1042 if not results and matchalways:
1043 if not results and matchalways:
1043 visit = [f for f in dmap]
1044 visit = [f for f in dmap]
1044 else:
1045 else:
1045 visit = [f for f in dmap if f not in results and matchfn(f)]
1046 visit = [f for f in dmap if f not in results and matchfn(f)]
1046 visit.sort()
1047 visit.sort()
1047
1048
1048 if unknown:
1049 if unknown:
1049 # unknown == True means we walked all dirs under the roots
1050 # unknown == True means we walked all dirs under the roots
1050 # that weren't ignored, and everything that matched was stat'ed
1051 # that weren't ignored, and everything that matched was stat'ed
1051 # and is already in results.
1052 # and is already in results.
1052 # The rest must thus be ignored or under a symlink.
1053 # The rest must thus be ignored or under a symlink.
1053 audit_path = pathutil.pathauditor(self._root, cached=True)
1054 audit_path = pathutil.pathauditor(self._root, cached=True)
1054
1055
1055 for nf in iter(visit):
1056 for nf in iter(visit):
1056 # If a stat for the same file was already added with a
1057 # If a stat for the same file was already added with a
1057 # different case, don't add one for this, since that would
1058 # different case, don't add one for this, since that would
1058 # make it appear as if the file exists under both names
1059 # make it appear as if the file exists under both names
1059 # on disk.
1060 # on disk.
1060 if (
1061 if (
1061 normalizefile
1062 normalizefile
1062 and normalizefile(nf, True, True) in results
1063 and normalizefile(nf, True, True) in results
1063 ):
1064 ):
1064 results[nf] = None
1065 results[nf] = None
1065 # Report ignored items in the dmap as long as they are not
1066 # Report ignored items in the dmap as long as they are not
1066 # under a symlink directory.
1067 # under a symlink directory.
1067 elif audit_path.check(nf):
1068 elif audit_path.check(nf):
1068 try:
1069 try:
1069 results[nf] = lstat(join(nf))
1070 results[nf] = lstat(join(nf))
1070 # file was just ignored, no links, and exists
1071 # file was just ignored, no links, and exists
1071 except OSError:
1072 except OSError:
1072 # file doesn't exist
1073 # file doesn't exist
1073 results[nf] = None
1074 results[nf] = None
1074 else:
1075 else:
1075 # It's either missing or under a symlink directory
1076 # It's either missing or under a symlink directory
1076 # which we in this case report as missing
1077 # which we in this case report as missing
1077 results[nf] = None
1078 results[nf] = None
1078 else:
1079 else:
1079 # We may not have walked the full directory tree above,
1080 # We may not have walked the full directory tree above,
1080 # so stat and check everything we missed.
1081 # so stat and check everything we missed.
1081 iv = iter(visit)
1082 iv = iter(visit)
1082 for st in util.statfiles([join(i) for i in visit]):
1083 for st in util.statfiles([join(i) for i in visit]):
1083 results[next(iv)] = st
1084 results[next(iv)] = st
1084 return results
1085 return results
1085
1086
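For orientation, here is a hedged usage sketch of the three-step walk above. It assumes an existing dirstate instance and matcher (both named here for illustration only); the positional arguments mirror the call made from status() later in this file.

# Sketch only: `dirstate` and `match` stand for existing objects, they are
# not constructed here. Arguments follow walk(match, subrepos, unknown,
# ignored) as it is called from status() below.
results = dirstate.walk(match, [], True, False)
# results maps repository-relative paths to os.stat_result objects, or to
# None for entries that are known but were not (or could not be) stat'ed.
missing_or_unstatted = [f for f, st in results.items() if st is None]
on_disk_sizes = {f: st.st_size for f, st in results.items() if st is not None}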
1086 def _rust_status(self, matcher, list_clean):
1087 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1087 # Force Rayon (Rust parallelism library) to respect the number of
1088 # Force Rayon (Rust parallelism library) to respect the number of
1088 # workers. This is a temporary workaround until Rust code knows
1089 # workers. This is a temporary workaround until Rust code knows
1089 # how to read the config file.
1090 # how to read the config file.
1090 numcpus = self._ui.configint(b"worker", b"numcpus")
1091 numcpus = self._ui.configint(b"worker", b"numcpus")
1091 if numcpus is not None:
1092 if numcpus is not None:
1092 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1093 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1093
1094
1094 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1095 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1095 if not workers_enabled:
1096 if not workers_enabled:
1096 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1097 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1097
1098
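As a standalone illustration of the workaround above (not part of Mercurial's API): Rayon typically sizes its global thread pool from RAYON_NUM_THREADS the first time the pool is created, so the variable has to be in the environment before the Rust extension first parallelizes work. The extension module name below is hypothetical.

import os

# Must be set before the Rust code builds Rayon's global thread pool;
# changing it afterwards is typically ignored.
os.environ.setdefault("RAYON_NUM_THREADS", "4")
# import my_rust_extension  # hypothetical Rayon-using extension module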
1098 (
1099 (
1099 lookup,
1100 lookup,
1100 modified,
1101 modified,
1101 added,
1102 added,
1102 removed,
1103 removed,
1103 deleted,
1104 deleted,
1105 clean,
1106 ignored,
1104 unknown,
1107 unknown,
1105 clean,
1108 warnings,
1109 bad,
1106 ) = rustmod.status(
1110 ) = rustmod.status(
1107 self._map._rustmap,
1111 self._map._rustmap,
1108 matcher,
1112 matcher,
1109 self._rootdir,
1113 self._rootdir,
1110 bool(list_clean),
1114 self._ignorefiles(),
1115 self._checkexec,
1111 self._lastnormaltime,
1116 self._lastnormaltime,
1112 self._checkexec,
1117 bool(list_clean),
1118 bool(list_ignored),
1119 bool(list_unknown),
1113 )
1120 )
1121 if self._ui.warn:
1122 for item in warnings:
1123 if isinstance(item, tuple):
1124 file_path, syntax = item
1125 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1126 file_path,
1127 syntax,
1128 )
1129 self._ui.warn(msg)
1130 else:
1131 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1132 self._ui.warn(
1133 msg
1134 % (
1135 pathutil.canonpath(
1136 self._rootdir, self._rootdir, item
1137 ),
1138 b"No such file or directory",
1139 )
1140 )
1141
1142 for (fn, message) in bad:
1143 matcher.bad(fn, encoding.strtolocal(message))
1114
1144
1115 status = scmutil.status(
1145 status = scmutil.status(
1116 modified=modified,
1146 modified=modified,
1117 added=added,
1147 added=added,
1118 removed=removed,
1148 removed=removed,
1119 deleted=deleted,
1149 deleted=deleted,
1120 unknown=unknown,
1150 unknown=unknown,
1121 ignored=[],
1151 ignored=ignored,
1122 clean=clean,
1152 clean=clean,
1123 )
1153 )
1124 return (lookup, status)
1154 return (lookup, status)
1125
1155
1126 def status(self, match, subrepos, ignored, clean, unknown):
1156 def status(self, match, subrepos, ignored, clean, unknown):
1127 '''Determine the status of the working copy relative to the
1157 '''Determine the status of the working copy relative to the
1128 dirstate and return a pair of (unsure, status), where status is of type
1158 dirstate and return a pair of (unsure, status), where status is of type
1129 scmutil.status and:
1159 scmutil.status and:
1130
1160
1131 unsure:
1161 unsure:
1132 files that might have been modified since the dirstate was
1162 files that might have been modified since the dirstate was
1133 written, but need to be read to be sure (size is the same
1163 written, but need to be read to be sure (size is the same
1134 but mtime differs)
1164 but mtime differs)
1135 status.modified:
1165 status.modified:
1136 files that have definitely been modified since the dirstate
1166 files that have definitely been modified since the dirstate
1137 was written (different size or mode)
1167 was written (different size or mode)
1138 status.clean:
1168 status.clean:
1139 files that have definitely not been modified since the
1169 files that have definitely not been modified since the
1140 dirstate was written
1170 dirstate was written
1141 '''
1171 '''
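A minimal usage sketch of the contract documented in this docstring. The dirstate instance is assumed to exist already; matchmod.always() mirrors how an everything-matcher is built elsewhere in this codebase.

# Sketch only: `dirstate` stands for an existing dirstate instance.
unsure, st = dirstate.status(
    matchmod.always(), subrepos=[], ignored=False, clean=False, unknown=True
)
# Files in `unsure` need a content comparison before they can be classified;
# files in st.modified are already known to differ.
summary = (len(st.modified), len(st.added), len(st.removed), len(unsure))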
1142 listignored, listclean, listunknown = ignored, clean, unknown
1172 listignored, listclean, listunknown = ignored, clean, unknown
1143 lookup, modified, added, unknown, ignored = [], [], [], [], []
1173 lookup, modified, added, unknown, ignored = [], [], [], [], []
1144 removed, deleted, clean = [], [], []
1174 removed, deleted, clean = [], [], []
1145
1175
1146 dmap = self._map
1176 dmap = self._map
1147 dmap.preload()
1177 dmap.preload()
1148
1178
1149 use_rust = True
1179 use_rust = True
1150
1180
1151 allowed_matchers = (matchmod.alwaysmatcher, matchmod.exactmatcher)
1181 allowed_matchers = (
1182 matchmod.alwaysmatcher,
1183 matchmod.exactmatcher,
1184 matchmod.includematcher,
1185 )
1152
1186
1153 if rustmod is None:
1187 if rustmod is None:
1154 use_rust = False
1188 use_rust = False
1189 elif self._checkcase:
1190 # Case-insensitive filesystems are not handled yet
1191 use_rust = False
1155 elif subrepos:
1192 elif subrepos:
1156 use_rust = False
1193 use_rust = False
1157 elif bool(listunknown):
1194 elif sparse.enabled:
1158 # Pathauditor does not exist yet in Rust, unknown files
1159 # can't be trusted.
1160 use_rust = False
1195 use_rust = False
1161 elif self._ignorefiles() and listignored:
1196 elif match.traversedir is not None:
1162 # Rust has no ignore mechanism yet, so don't use Rust for
1163 # commands that need ignore.
1164 use_rust = False
1197 use_rust = False
1165 elif not isinstance(match, allowed_matchers):
1198 elif not isinstance(match, allowed_matchers):
1166 # Matchers have yet to be implemented
1199 # Matchers have yet to be implemented
1167 use_rust = False
1200 use_rust = False
1168
1201
1169 if use_rust:
1202 if use_rust:
1170 return self._rust_status(match, listclean)
1203 try:
1204 return self._rust_status(
1205 match, listclean, listignored, listunknown
1206 )
1207 except rustmod.FallbackError:
1208 pass
1171
1209
1172 def noop(f):
1210 def noop(f):
1173 pass
1211 pass
1174
1212
1175 dcontains = dmap.__contains__
1213 dcontains = dmap.__contains__
1176 dget = dmap.__getitem__
1214 dget = dmap.__getitem__
1177 ladd = lookup.append # aka "unsure"
1215 ladd = lookup.append # aka "unsure"
1178 madd = modified.append
1216 madd = modified.append
1179 aadd = added.append
1217 aadd = added.append
1180 uadd = unknown.append if listunknown else noop
1218 uadd = unknown.append if listunknown else noop
1181 iadd = ignored.append if listignored else noop
1219 iadd = ignored.append if listignored else noop
1182 radd = removed.append
1220 radd = removed.append
1183 dadd = deleted.append
1221 dadd = deleted.append
1184 cadd = clean.append if listclean else noop
1222 cadd = clean.append if listclean else noop
1185 mexact = match.exact
1223 mexact = match.exact
1186 dirignore = self._dirignore
1224 dirignore = self._dirignore
1187 checkexec = self._checkexec
1225 checkexec = self._checkexec
1188 copymap = self._map.copymap
1226 copymap = self._map.copymap
1189 lastnormaltime = self._lastnormaltime
1227 lastnormaltime = self._lastnormaltime
1190
1228
1191 # We need to do full walks when either
1229 # We need to do full walks when either
1192 # - we're listing all clean files, or
1230 # - we're listing all clean files, or
1193 # - match.traversedir does something, because match.traversedir should
1231 # - match.traversedir does something, because match.traversedir should
1194 # be called for every dir in the working dir
1232 # be called for every dir in the working dir
1195 full = listclean or match.traversedir is not None
1233 full = listclean or match.traversedir is not None
1196 for fn, st in pycompat.iteritems(
1234 for fn, st in pycompat.iteritems(
1197 self.walk(match, subrepos, listunknown, listignored, full=full)
1235 self.walk(match, subrepos, listunknown, listignored, full=full)
1198 ):
1236 ):
1199 if not dcontains(fn):
1237 if not dcontains(fn):
1200 if (listignored or mexact(fn)) and dirignore(fn):
1238 if (listignored or mexact(fn)) and dirignore(fn):
1201 if listignored:
1239 if listignored:
1202 iadd(fn)
1240 iadd(fn)
1203 else:
1241 else:
1204 uadd(fn)
1242 uadd(fn)
1205 continue
1243 continue
1206
1244
1207 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1245 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1208 # written like that for performance reasons. dmap[fn] is not a
1246 # written like that for performance reasons. dmap[fn] is not a
1209 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1247 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1210 # opcode has fast paths when the value to be unpacked is a tuple or
1248 # opcode has fast paths when the value to be unpacked is a tuple or
1211 # a list, but falls back to creating a full-fledged iterator in
1249 # a list, but falls back to creating a full-fledged iterator in
1212 # general. That is much slower than simply accessing and storing the
1250 # general. That is much slower than simply accessing and storing the
1213 # tuple members one by one.
1251 # tuple members one by one.
1214 t = dget(fn)
1252 t = dget(fn)
1215 state = t[0]
1253 state = t[0]
1216 mode = t[1]
1254 mode = t[1]
1217 size = t[2]
1255 size = t[2]
1218 time = t[3]
1256 time = t[3]
1219
1257
1220 if not st and state in b"nma":
1258 if not st and state in b"nma":
1221 dadd(fn)
1259 dadd(fn)
1222 elif state == b'n':
1260 elif state == b'n':
1223 if (
1261 if (
1224 size >= 0
1262 size >= 0
1225 and (
1263 and (
1226 (size != st.st_size and size != st.st_size & _rangemask)
1264 (size != st.st_size and size != st.st_size & _rangemask)
1227 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1265 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1228 )
1266 )
1229 or size == -2 # other parent
1267 or size == -2 # other parent
1230 or fn in copymap
1268 or fn in copymap
1231 ):
1269 ):
1232 madd(fn)
1270 madd(fn)
1233 elif (
1271 elif (
1234 time != st[stat.ST_MTIME]
1272 time != st[stat.ST_MTIME]
1235 and time != st[stat.ST_MTIME] & _rangemask
1273 and time != st[stat.ST_MTIME] & _rangemask
1236 ):
1274 ):
1237 ladd(fn)
1275 ladd(fn)
1238 elif st[stat.ST_MTIME] == lastnormaltime:
1276 elif st[stat.ST_MTIME] == lastnormaltime:
1239 # fn may have just been marked as normal and it may have
1277 # fn may have just been marked as normal and it may have
1240 # changed in the same second without changing its size.
1278 # changed in the same second without changing its size.
1241 # This can happen if we quickly do multiple commits.
1279 # This can happen if we quickly do multiple commits.
1242 # Force lookup, so we don't miss such a racy file change.
1280 # Force lookup, so we don't miss such a racy file change.
1243 ladd(fn)
1281 ladd(fn)
1244 elif listclean:
1282 elif listclean:
1245 cadd(fn)
1283 cadd(fn)
1246 elif state == b'm':
1284 elif state == b'm':
1247 madd(fn)
1285 madd(fn)
1248 elif state == b'a':
1286 elif state == b'a':
1249 aadd(fn)
1287 aadd(fn)
1250 elif state == b'r':
1288 elif state == b'r':
1251 radd(fn)
1289 radd(fn)
1252
1290 status = scmutil.status(
1253 return (
1291 modified, added, removed, deleted, unknown, ignored, clean
1254 lookup,
1255 scmutil.status(
1256 modified, added, removed, deleted, unknown, ignored, clean
1257 ),
1258 )
1292 )
1293 return (lookup, status)
1259
1294
1260 def matches(self, match):
1295 def matches(self, match):
1261 '''
1296 '''
1262 return files in the dirstate (in whatever state) filtered by match
1297 return files in the dirstate (in whatever state) filtered by match
1263 '''
1298 '''
1264 dmap = self._map
1299 dmap = self._map
1265 if rustmod is not None:
1300 if rustmod is not None:
1266 dmap = self._map._rustmap
1301 dmap = self._map._rustmap
1267
1302
1268 if match.always():
1303 if match.always():
1269 return dmap.keys()
1304 return dmap.keys()
1270 files = match.files()
1305 files = match.files()
1271 if match.isexact():
1306 if match.isexact():
1272 # fast path -- filter the other way around, since typically files is
1307 # fast path -- filter the other way around, since typically files is
1273 # much smaller than dmap
1308 # much smaller than dmap
1274 return [f for f in files if f in dmap]
1309 return [f for f in files if f in dmap]
1275 if match.prefix() and all(fn in dmap for fn in files):
1310 if match.prefix() and all(fn in dmap for fn in files):
1276 # fast path -- all the values are known to be files, so just return
1311 # fast path -- all the values are known to be files, so just return
1277 # that
1312 # that
1278 return list(files)
1313 return list(files)
1279 return [f for f in dmap if match(f)]
1314 return [f for f in dmap if match(f)]
1280
1315
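An illustrative call for the method above, showing the exact-matcher fast path; matchmod.exact() is an assumption based on how exact matchers are built elsewhere in Mercurial, and `dirstate` again stands for an existing instance.

# With an exact matcher, matches() filters the (usually short) file list
# against dmap instead of scanning every dirstate entry.
m = matchmod.exact([b'README', b'setup.py'])
tracked_subset = dirstate.matches(m)  # only the names present in the dirstate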
1281 def _actualfilename(self, tr):
1316 def _actualfilename(self, tr):
1282 if tr:
1317 if tr:
1283 return self._pendingfilename
1318 return self._pendingfilename
1284 else:
1319 else:
1285 return self._filename
1320 return self._filename
1286
1321
1287 def savebackup(self, tr, backupname):
1322 def savebackup(self, tr, backupname):
1288 '''Save current dirstate into backup file'''
1323 '''Save current dirstate into backup file'''
1289 filename = self._actualfilename(tr)
1324 filename = self._actualfilename(tr)
1290 assert backupname != filename
1325 assert backupname != filename
1291
1326
1292 # use '_writedirstate' instead of 'write' to make sure changes are written
1327 # use '_writedirstate' instead of 'write' to make sure changes are written
1293 # out, because the latter skips writing when a transaction is running.
1328 # out, because the latter skips writing when a transaction is running.
1294 # output file will be used to create backup of dirstate at this point.
1329 # output file will be used to create backup of dirstate at this point.
1295 if self._dirty or not self._opener.exists(filename):
1330 if self._dirty or not self._opener.exists(filename):
1296 self._writedirstate(
1331 self._writedirstate(
1297 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1332 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1298 )
1333 )
1299
1334
1300 if tr:
1335 if tr:
1301 # ensure that subsequent tr.writepending returns True for
1336 # ensure that subsequent tr.writepending returns True for
1302 # changes written out above, even if dirstate is never
1337 # changes written out above, even if dirstate is never
1303 # changed after this
1338 # changed after this
1304 tr.addfilegenerator(
1339 tr.addfilegenerator(
1305 b'dirstate',
1340 b'dirstate',
1306 (self._filename,),
1341 (self._filename,),
1307 self._writedirstate,
1342 self._writedirstate,
1308 location=b'plain',
1343 location=b'plain',
1309 )
1344 )
1310
1345
1311 # ensure that pending file written above is unlinked at
1346 # ensure that pending file written above is unlinked at
1312 # failure, even if tr.writepending isn't invoked until the
1347 # failure, even if tr.writepending isn't invoked until the
1313 # end of this transaction
1348 # end of this transaction
1314 tr.registertmp(filename, location=b'plain')
1349 tr.registertmp(filename, location=b'plain')
1315
1350
1316 self._opener.tryunlink(backupname)
1351 self._opener.tryunlink(backupname)
1317 # hardlink backup is okay because _writedirstate is always called
1352 # hardlink backup is okay because _writedirstate is always called
1318 # with an "atomictemp=True" file.
1353 # with an "atomictemp=True" file.
1319 util.copyfile(
1354 util.copyfile(
1320 self._opener.join(filename),
1355 self._opener.join(filename),
1321 self._opener.join(backupname),
1356 self._opener.join(backupname),
1322 hardlink=True,
1357 hardlink=True,
1323 )
1358 )
1324
1359
1325 def restorebackup(self, tr, backupname):
1360 def restorebackup(self, tr, backupname):
1326 '''Restore dirstate by backup file'''
1361 '''Restore dirstate by backup file'''
1327 # this "invalidate()" prevents "wlock.release()" from writing
1362 # this "invalidate()" prevents "wlock.release()" from writing
1328 # changes of dirstate out after restoring from backup file
1363 # changes of dirstate out after restoring from backup file
1329 self.invalidate()
1364 self.invalidate()
1330 filename = self._actualfilename(tr)
1365 filename = self._actualfilename(tr)
1331 o = self._opener
1366 o = self._opener
1332 if util.samefile(o.join(backupname), o.join(filename)):
1367 if util.samefile(o.join(backupname), o.join(filename)):
1333 o.unlink(backupname)
1368 o.unlink(backupname)
1334 else:
1369 else:
1335 o.rename(backupname, filename, checkambig=True)
1370 o.rename(backupname, filename, checkambig=True)
1336
1371
1337 def clearbackup(self, tr, backupname):
1372 def clearbackup(self, tr, backupname):
1338 '''Clear backup file'''
1373 '''Clear backup file'''
1339 self._opener.unlink(backupname)
1374 self._opener.unlink(backupname)
1340
1375
1341
1376
1342 class dirstatemap(object):
1377 class dirstatemap(object):
1343 """Map encapsulating the dirstate's contents.
1378 """Map encapsulating the dirstate's contents.
1344
1379
1345 The dirstate contains the following state:
1380 The dirstate contains the following state:
1346
1381
1347 - `identity` is the identity of the dirstate file, which can be used to
1382 - `identity` is the identity of the dirstate file, which can be used to
1348 detect when changes have occurred to the dirstate file.
1383 detect when changes have occurred to the dirstate file.
1349
1384
1350 - `parents` is a pair containing the parents of the working copy. The
1385 - `parents` is a pair containing the parents of the working copy. The
1351 parents are updated by calling `setparents`.
1386 parents are updated by calling `setparents`.
1352
1387
1353 - the state map maps filenames to tuples of (state, mode, size, mtime),
1388 - the state map maps filenames to tuples of (state, mode, size, mtime),
1354 where state is a single character representing 'normal', 'added',
1389 where state is a single character representing 'normal', 'added',
1355 'removed', or 'merged'. It is read by treating the dirstate as a
1390 'removed', or 'merged'. It is read by treating the dirstate as a
1356 dict. File state is updated by calling the `addfile`, `removefile` and
1391 dict. File state is updated by calling the `addfile`, `removefile` and
1357 `dropfile` methods.
1392 `dropfile` methods.
1358
1393
1359 - `copymap` maps destination filenames to their source filename.
1394 - `copymap` maps destination filenames to their source filename.
1360
1395
1361 The dirstate also provides the following views onto the state:
1396 The dirstate also provides the following views onto the state:
1362
1397
1363 - `nonnormalset` is a set of the filenames that have state other
1398 - `nonnormalset` is a set of the filenames that have state other
1364 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1399 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1365
1400
1366 - `otherparentset` is a set of the filenames that are marked as coming
1401 - `otherparentset` is a set of the filenames that are marked as coming
1367 from the second parent when the dirstate is currently being merged.
1402 from the second parent when the dirstate is currently being merged.
1368
1403
1369 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1404 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1370 form that they appear as in the dirstate.
1405 form that they appear as in the dirstate.
1371
1406
1372 - `dirfoldmap` is a dict mapping normalized directory names to the
1407 - `dirfoldmap` is a dict mapping normalized directory names to the
1373 denormalized form that they appear as in the dirstate.
1408 denormalized form that they appear as in the dirstate.
1374 """
1409 """
1375
1410
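A hedged sketch of what these structures look like at runtime; the concrete filenames and values below are invented for illustration, and `dmap` stands for a loaded dirstatemap instance.

# state map: filename -> (state, mode, size, mtime)
dmap[b'setup.py']      # e.g. (b'n', 0o100644, 1543, 1588000000)  'normal'
dmap[b'newfile.txt']   # e.g. (b'a', 0, -1, -1)  'added', also in nonnormalset
# copymap: destination filename -> source filename (recorded by copies/renames)
dmap.copymap.get(b'copied.py')  # e.g. b'original.py'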
1376 def __init__(self, ui, opener, root):
1411 def __init__(self, ui, opener, root):
1377 self._ui = ui
1412 self._ui = ui
1378 self._opener = opener
1413 self._opener = opener
1379 self._root = root
1414 self._root = root
1380 self._filename = b'dirstate'
1415 self._filename = b'dirstate'
1381
1416
1382 self._parents = None
1417 self._parents = None
1383 self._dirtyparents = False
1418 self._dirtyparents = False
1384
1419
1385 # for consistent view between _pl() and _read() invocations
1420 # for consistent view between _pl() and _read() invocations
1386 self._pendingmode = None
1421 self._pendingmode = None
1387
1422
1388 @propertycache
1423 @propertycache
1389 def _map(self):
1424 def _map(self):
1390 self._map = {}
1425 self._map = {}
1391 self.read()
1426 self.read()
1392 return self._map
1427 return self._map
1393
1428
1394 @propertycache
1429 @propertycache
1395 def copymap(self):
1430 def copymap(self):
1396 self.copymap = {}
1431 self.copymap = {}
1397 self._map
1432 self._map
1398 return self.copymap
1433 return self.copymap
1399
1434
1400 def clear(self):
1435 def clear(self):
1401 self._map.clear()
1436 self._map.clear()
1402 self.copymap.clear()
1437 self.copymap.clear()
1403 self.setparents(nullid, nullid)
1438 self.setparents(nullid, nullid)
1404 util.clearcachedproperty(self, b"_dirs")
1439 util.clearcachedproperty(self, b"_dirs")
1405 util.clearcachedproperty(self, b"_alldirs")
1440 util.clearcachedproperty(self, b"_alldirs")
1406 util.clearcachedproperty(self, b"filefoldmap")
1441 util.clearcachedproperty(self, b"filefoldmap")
1407 util.clearcachedproperty(self, b"dirfoldmap")
1442 util.clearcachedproperty(self, b"dirfoldmap")
1408 util.clearcachedproperty(self, b"nonnormalset")
1443 util.clearcachedproperty(self, b"nonnormalset")
1409 util.clearcachedproperty(self, b"otherparentset")
1444 util.clearcachedproperty(self, b"otherparentset")
1410
1445
1411 def items(self):
1446 def items(self):
1412 return pycompat.iteritems(self._map)
1447 return pycompat.iteritems(self._map)
1413
1448
1414 # forward for python2,3 compat
1449 # forward for python2,3 compat
1415 iteritems = items
1450 iteritems = items
1416
1451
1417 def __len__(self):
1452 def __len__(self):
1418 return len(self._map)
1453 return len(self._map)
1419
1454
1420 def __iter__(self):
1455 def __iter__(self):
1421 return iter(self._map)
1456 return iter(self._map)
1422
1457
1423 def get(self, key, default=None):
1458 def get(self, key, default=None):
1424 return self._map.get(key, default)
1459 return self._map.get(key, default)
1425
1460
1426 def __contains__(self, key):
1461 def __contains__(self, key):
1427 return key in self._map
1462 return key in self._map
1428
1463
1429 def __getitem__(self, key):
1464 def __getitem__(self, key):
1430 return self._map[key]
1465 return self._map[key]
1431
1466
1432 def keys(self):
1467 def keys(self):
1433 return self._map.keys()
1468 return self._map.keys()
1434
1469
1435 def preload(self):
1470 def preload(self):
1436 """Loads the underlying data, if it's not already loaded"""
1471 """Loads the underlying data, if it's not already loaded"""
1437 self._map
1472 self._map
1438
1473
1439 def addfile(self, f, oldstate, state, mode, size, mtime):
1474 def addfile(self, f, oldstate, state, mode, size, mtime):
1440 """Add a tracked file to the dirstate."""
1475 """Add a tracked file to the dirstate."""
1441 if oldstate in b"?r" and "_dirs" in self.__dict__:
1476 if oldstate in b"?r" and "_dirs" in self.__dict__:
1442 self._dirs.addpath(f)
1477 self._dirs.addpath(f)
1443 if oldstate == b"?" and "_alldirs" in self.__dict__:
1478 if oldstate == b"?" and "_alldirs" in self.__dict__:
1444 self._alldirs.addpath(f)
1479 self._alldirs.addpath(f)
1445 self._map[f] = dirstatetuple(state, mode, size, mtime)
1480 self._map[f] = dirstatetuple(state, mode, size, mtime)
1446 if state != b'n' or mtime == -1:
1481 if state != b'n' or mtime == -1:
1447 self.nonnormalset.add(f)
1482 self.nonnormalset.add(f)
1448 if size == -2:
1483 if size == -2:
1449 self.otherparentset.add(f)
1484 self.otherparentset.add(f)
1450
1485
1451 def removefile(self, f, oldstate, size):
1486 def removefile(self, f, oldstate, size):
1452 """
1487 """
1453 Mark a file as removed in the dirstate.
1488 Mark a file as removed in the dirstate.
1454
1489
1455 The `size` parameter is used to store sentinel values that indicate
1490 The `size` parameter is used to store sentinel values that indicate
1456 the file's previous state. In the future, we should refactor this
1491 the file's previous state. In the future, we should refactor this
1457 to be more explicit about what that state is.
1492 to be more explicit about what that state is.
1458 """
1493 """
1459 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1494 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1460 self._dirs.delpath(f)
1495 self._dirs.delpath(f)
1461 if oldstate == b"?" and "_alldirs" in self.__dict__:
1496 if oldstate == b"?" and "_alldirs" in self.__dict__:
1462 self._alldirs.addpath(f)
1497 self._alldirs.addpath(f)
1463 if "filefoldmap" in self.__dict__:
1498 if "filefoldmap" in self.__dict__:
1464 normed = util.normcase(f)
1499 normed = util.normcase(f)
1465 self.filefoldmap.pop(normed, None)
1500 self.filefoldmap.pop(normed, None)
1466 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1501 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1467 self.nonnormalset.add(f)
1502 self.nonnormalset.add(f)
1468
1503
1469 def dropfile(self, f, oldstate):
1504 def dropfile(self, f, oldstate):
1470 """
1505 """
1471 Remove a file from the dirstate. Returns True if the file was
1506 Remove a file from the dirstate. Returns True if the file was
1472 previously recorded.
1507 previously recorded.
1473 """
1508 """
1474 exists = self._map.pop(f, None) is not None
1509 exists = self._map.pop(f, None) is not None
1475 if exists:
1510 if exists:
1476 if oldstate != b"r" and "_dirs" in self.__dict__:
1511 if oldstate != b"r" and "_dirs" in self.__dict__:
1477 self._dirs.delpath(f)
1512 self._dirs.delpath(f)
1478 if "_alldirs" in self.__dict__:
1513 if "_alldirs" in self.__dict__:
1479 self._alldirs.delpath(f)
1514 self._alldirs.delpath(f)
1480 if "filefoldmap" in self.__dict__:
1515 if "filefoldmap" in self.__dict__:
1481 normed = util.normcase(f)
1516 normed = util.normcase(f)
1482 self.filefoldmap.pop(normed, None)
1517 self.filefoldmap.pop(normed, None)
1483 self.nonnormalset.discard(f)
1518 self.nonnormalset.discard(f)
1484 return exists
1519 return exists
1485
1520
1486 def clearambiguoustimes(self, files, now):
1521 def clearambiguoustimes(self, files, now):
1487 for f in files:
1522 for f in files:
1488 e = self.get(f)
1523 e = self.get(f)
1489 if e is not None and e[0] == b'n' and e[3] == now:
1524 if e is not None and e[0] == b'n' and e[3] == now:
1490 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1525 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1491 self.nonnormalset.add(f)
1526 self.nonnormalset.add(f)
1492
1527
1493 def nonnormalentries(self):
1528 def nonnormalentries(self):
1494 '''Compute the nonnormal dirstate entries from the dmap'''
1529 '''Compute the nonnormal dirstate entries from the dmap'''
1495 try:
1530 try:
1496 return parsers.nonnormalotherparententries(self._map)
1531 return parsers.nonnormalotherparententries(self._map)
1497 except AttributeError:
1532 except AttributeError:
1498 nonnorm = set()
1533 nonnorm = set()
1499 otherparent = set()
1534 otherparent = set()
1500 for fname, e in pycompat.iteritems(self._map):
1535 for fname, e in pycompat.iteritems(self._map):
1501 if e[0] != b'n' or e[3] == -1:
1536 if e[0] != b'n' or e[3] == -1:
1502 nonnorm.add(fname)
1537 nonnorm.add(fname)
1503 if e[0] == b'n' and e[2] == -2:
1538 if e[0] == b'n' and e[2] == -2:
1504 otherparent.add(fname)
1539 otherparent.add(fname)
1505 return nonnorm, otherparent
1540 return nonnorm, otherparent
1506
1541
1507 @propertycache
1542 @propertycache
1508 def filefoldmap(self):
1543 def filefoldmap(self):
1509 """Returns a dictionary mapping normalized case paths to their
1544 """Returns a dictionary mapping normalized case paths to their
1510 non-normalized versions.
1545 non-normalized versions.
1511 """
1546 """
1512 try:
1547 try:
1513 makefilefoldmap = parsers.make_file_foldmap
1548 makefilefoldmap = parsers.make_file_foldmap
1514 except AttributeError:
1549 except AttributeError:
1515 pass
1550 pass
1516 else:
1551 else:
1517 return makefilefoldmap(
1552 return makefilefoldmap(
1518 self._map, util.normcasespec, util.normcasefallback
1553 self._map, util.normcasespec, util.normcasefallback
1519 )
1554 )
1520
1555
1521 f = {}
1556 f = {}
1522 normcase = util.normcase
1557 normcase = util.normcase
1523 for name, s in pycompat.iteritems(self._map):
1558 for name, s in pycompat.iteritems(self._map):
1524 if s[0] != b'r':
1559 if s[0] != b'r':
1525 f[normcase(name)] = name
1560 f[normcase(name)] = name
1526 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1561 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1527 return f
1562 return f
1528
1563
1529 def hastrackeddir(self, d):
1564 def hastrackeddir(self, d):
1530 """
1565 """
1531 Returns True if the dirstate contains a tracked (not removed) file
1566 Returns True if the dirstate contains a tracked (not removed) file
1532 in this directory.
1567 in this directory.
1533 """
1568 """
1534 return d in self._dirs
1569 return d in self._dirs
1535
1570
1536 def hasdir(self, d):
1571 def hasdir(self, d):
1537 """
1572 """
1538 Returns True if the dirstate contains a file (tracked or removed)
1573 Returns True if the dirstate contains a file (tracked or removed)
1539 in this directory.
1574 in this directory.
1540 """
1575 """
1541 return d in self._alldirs
1576 return d in self._alldirs
1542
1577
1543 @propertycache
1578 @propertycache
1544 def _dirs(self):
1579 def _dirs(self):
1545 return pathutil.dirs(self._map, b'r')
1580 return pathutil.dirs(self._map, b'r')
1546
1581
1547 @propertycache
1582 @propertycache
1548 def _alldirs(self):
1583 def _alldirs(self):
1549 return pathutil.dirs(self._map)
1584 return pathutil.dirs(self._map)
1550
1585
1551 def _opendirstatefile(self):
1586 def _opendirstatefile(self):
1552 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1587 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1553 if self._pendingmode is not None and self._pendingmode != mode:
1588 if self._pendingmode is not None and self._pendingmode != mode:
1554 fp.close()
1589 fp.close()
1555 raise error.Abort(
1590 raise error.Abort(
1556 _(b'working directory state may be changed parallelly')
1591 _(b'working directory state may be changed parallelly')
1557 )
1592 )
1558 self._pendingmode = mode
1593 self._pendingmode = mode
1559 return fp
1594 return fp
1560
1595
1561 def parents(self):
1596 def parents(self):
1562 if not self._parents:
1597 if not self._parents:
1563 try:
1598 try:
1564 fp = self._opendirstatefile()
1599 fp = self._opendirstatefile()
1565 st = fp.read(40)
1600 st = fp.read(40)
1566 fp.close()
1601 fp.close()
1567 except IOError as err:
1602 except IOError as err:
1568 if err.errno != errno.ENOENT:
1603 if err.errno != errno.ENOENT:
1569 raise
1604 raise
1570 # File doesn't exist, so the current state is empty
1605 # File doesn't exist, so the current state is empty
1571 st = b''
1606 st = b''
1572
1607
1573 l = len(st)
1608 l = len(st)
1574 if l == 40:
1609 if l == 40:
1575 self._parents = (st[:20], st[20:40])
1610 self._parents = (st[:20], st[20:40])
1576 elif l == 0:
1611 elif l == 0:
1577 self._parents = (nullid, nullid)
1612 self._parents = (nullid, nullid)
1578 else:
1613 else:
1579 raise error.Abort(
1614 raise error.Abort(
1580 _(b'working directory state appears damaged!')
1615 _(b'working directory state appears damaged!')
1581 )
1616 )
1582
1617
1583 return self._parents
1618 return self._parents
1584
1619
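For reference, a small sketch of the on-disk layout that the 40-byte read above relies on: a dirstate file starts with the two parent nodeids, 20 binary bytes each. Reading the file directly, as below, bypasses the opener and pending-transaction handling and is for illustration only.

with open('.hg/dirstate', 'rb') as fp:   # illustrative direct read
    header = fp.read(40)
p1, p2 = header[:20], header[20:40]      # binary nodeids (hex via node.hex)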
1585 def setparents(self, p1, p2):
1620 def setparents(self, p1, p2):
1586 self._parents = (p1, p2)
1621 self._parents = (p1, p2)
1587 self._dirtyparents = True
1622 self._dirtyparents = True
1588
1623
1589 def read(self):
1624 def read(self):
1590 # ignore HG_PENDING because identity is used only for writing
1625 # ignore HG_PENDING because identity is used only for writing
1591 self.identity = util.filestat.frompath(
1626 self.identity = util.filestat.frompath(
1592 self._opener.join(self._filename)
1627 self._opener.join(self._filename)
1593 )
1628 )
1594
1629
1595 try:
1630 try:
1596 fp = self._opendirstatefile()
1631 fp = self._opendirstatefile()
1597 try:
1632 try:
1598 st = fp.read()
1633 st = fp.read()
1599 finally:
1634 finally:
1600 fp.close()
1635 fp.close()
1601 except IOError as err:
1636 except IOError as err:
1602 if err.errno != errno.ENOENT:
1637 if err.errno != errno.ENOENT:
1603 raise
1638 raise
1604 return
1639 return
1605 if not st:
1640 if not st:
1606 return
1641 return
1607
1642
1608 if util.safehasattr(parsers, b'dict_new_presized'):
1643 if util.safehasattr(parsers, b'dict_new_presized'):
1609 # Make an estimate of the number of files in the dirstate based on
1644 # Make an estimate of the number of files in the dirstate based on
1610 # its size. From a linear regression on a set of real-world repos,
1645 # its size. From a linear regression on a set of real-world repos,
1611 # all over 10,000 files, the size of a dirstate entry is 85
1646 # all over 10,000 files, the size of a dirstate entry is 85
1612 # bytes. The cost of resizing is significantly higher than the cost
1647 # bytes. The cost of resizing is significantly higher than the cost
1613 # of filling in a larger presized dict, so subtract 20% from the
1648 # of filling in a larger presized dict, so subtract 20% from the
1614 # size.
1649 # size.
1615 #
1650 #
1616 # This heuristic is imperfect in many ways, so in a future dirstate
1651 # This heuristic is imperfect in many ways, so in a future dirstate
1617 # format update it makes sense to just record the number of entries
1652 # format update it makes sense to just record the number of entries
1618 # on write.
1653 # on write.
1619 self._map = parsers.dict_new_presized(len(st) // 71)
1654 self._map = parsers.dict_new_presized(len(st) // 71)
1620
1655
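A worked example of the heuristic above, with illustrative numbers (85 / 71 is about 1.2, i.e. the presized dict ends up roughly 20% larger than the raw per-entry estimate):

st_len = 1_000_000            # a ~1 MB dirstate file
raw_estimate = st_len // 85   # ~11,764 entries at ~85 bytes each
presized = st_len // 71       # ~14,084 slots, about 20% headroom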
1621 # Python's garbage collector triggers a GC each time a certain number
1656 # Python's garbage collector triggers a GC each time a certain number
1622 # of container objects (the number being defined by
1657 # of container objects (the number being defined by
1623 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1658 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1624 # for each file in the dirstate. The C version then immediately marks
1659 # for each file in the dirstate. The C version then immediately marks
1625 # them as not to be tracked by the collector. However, this has no
1660 # them as not to be tracked by the collector. However, this has no
1626 # effect on when GCs are triggered, only on what objects the GC looks
1661 # effect on when GCs are triggered, only on what objects the GC looks
1627 # into. This means that O(number of files) GCs are unavoidable.
1662 # into. This means that O(number of files) GCs are unavoidable.
1628 # Depending on when in the process's lifetime the dirstate is parsed,
1663 # Depending on when in the process's lifetime the dirstate is parsed,
1629 # this can get very expensive. As a workaround, disable GC while
1664 # this can get very expensive. As a workaround, disable GC while
1630 # parsing the dirstate.
1665 # parsing the dirstate.
1631 #
1666 #
1632 # (we cannot decorate the function directly since it is in a C module)
1667 # (we cannot decorate the function directly since it is in a C module)
1633 parse_dirstate = util.nogc(parsers.parse_dirstate)
1668 parse_dirstate = util.nogc(parsers.parse_dirstate)
1634 p = parse_dirstate(self._map, self.copymap, st)
1669 p = parse_dirstate(self._map, self.copymap, st)
1635 if not self._dirtyparents:
1670 if not self._dirtyparents:
1636 self.setparents(*p)
1671 self.setparents(*p)
1637
1672
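A hedged sketch of the util.nogc pattern used above: it wraps a callable so that Python's cyclic garbage collector stays disabled only for the duration of the call. The parse function below is a stand-in for illustration, not the real parser.

from mercurial import util

def parse_many_entries(data):
    # stand-in: allocates one container per record, like parse_dirstate does
    return [tuple(rec.split(b'\x00')) for rec in data.split(b'\n')]

parse = util.nogc(parse_many_entries)  # GC is only suppressed inside the call
entries = parse(b'a\x00n\nb\x00a')     # [(b'a', b'n'), (b'b', b'a')]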
1638 # Avoid excess attribute lookups by fast pathing certain checks
1673 # Avoid excess attribute lookups by fast pathing certain checks
1639 self.__contains__ = self._map.__contains__
1674 self.__contains__ = self._map.__contains__
1640 self.__getitem__ = self._map.__getitem__
1675 self.__getitem__ = self._map.__getitem__
1641 self.get = self._map.get
1676 self.get = self._map.get
1642
1677
1643 def write(self, st, now):
1678 def write(self, st, now):
1644 st.write(
1679 st.write(
1645 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1680 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1646 )
1681 )
1647 st.close()
1682 st.close()
1648 self._dirtyparents = False
1683 self._dirtyparents = False
1649 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1684 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1650
1685
1651 @propertycache
1686 @propertycache
1652 def nonnormalset(self):
1687 def nonnormalset(self):
1653 nonnorm, otherparents = self.nonnormalentries()
1688 nonnorm, otherparents = self.nonnormalentries()
1654 self.otherparentset = otherparents
1689 self.otherparentset = otherparents
1655 return nonnorm
1690 return nonnorm
1656
1691
1657 @propertycache
1692 @propertycache
1658 def otherparentset(self):
1693 def otherparentset(self):
1659 nonnorm, otherparents = self.nonnormalentries()
1694 nonnorm, otherparents = self.nonnormalentries()
1660 self.nonnormalset = nonnorm
1695 self.nonnormalset = nonnorm
1661 return otherparents
1696 return otherparents
1662
1697
1663 @propertycache
1698 @propertycache
1664 def identity(self):
1699 def identity(self):
1665 self._map
1700 self._map
1666 return self.identity
1701 return self.identity
1667
1702
1668 @propertycache
1703 @propertycache
1669 def dirfoldmap(self):
1704 def dirfoldmap(self):
1670 f = {}
1705 f = {}
1671 normcase = util.normcase
1706 normcase = util.normcase
1672 for name in self._dirs:
1707 for name in self._dirs:
1673 f[normcase(name)] = name
1708 f[normcase(name)] = name
1674 return f
1709 return f
1675
1710
1676
1711
1677 if rustmod is not None:
1712 if rustmod is not None:
1678
1713
1679 class dirstatemap(object):
1714 class dirstatemap(object):
1680 def __init__(self, ui, opener, root):
1715 def __init__(self, ui, opener, root):
1681 self._ui = ui
1716 self._ui = ui
1682 self._opener = opener
1717 self._opener = opener
1683 self._root = root
1718 self._root = root
1684 self._filename = b'dirstate'
1719 self._filename = b'dirstate'
1685 self._parents = None
1720 self._parents = None
1686 self._dirtyparents = False
1721 self._dirtyparents = False
1687
1722
1688 # for consistent view between _pl() and _read() invocations
1723 # for consistent view between _pl() and _read() invocations
1689 self._pendingmode = None
1724 self._pendingmode = None
1690
1725
1691 def addfile(self, *args, **kwargs):
1726 def addfile(self, *args, **kwargs):
1692 return self._rustmap.addfile(*args, **kwargs)
1727 return self._rustmap.addfile(*args, **kwargs)
1693
1728
1694 def removefile(self, *args, **kwargs):
1729 def removefile(self, *args, **kwargs):
1695 return self._rustmap.removefile(*args, **kwargs)
1730 return self._rustmap.removefile(*args, **kwargs)
1696
1731
1697 def dropfile(self, *args, **kwargs):
1732 def dropfile(self, *args, **kwargs):
1698 return self._rustmap.dropfile(*args, **kwargs)
1733 return self._rustmap.dropfile(*args, **kwargs)
1699
1734
1700 def clearambiguoustimes(self, *args, **kwargs):
1735 def clearambiguoustimes(self, *args, **kwargs):
1701 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1736 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1702
1737
1703 def nonnormalentries(self):
1738 def nonnormalentries(self):
1704 return self._rustmap.nonnormalentries()
1739 return self._rustmap.nonnormalentries()
1705
1740
1706 def get(self, *args, **kwargs):
1741 def get(self, *args, **kwargs):
1707 return self._rustmap.get(*args, **kwargs)
1742 return self._rustmap.get(*args, **kwargs)
1708
1743
1709 @propertycache
1744 @propertycache
1710 def _rustmap(self):
1745 def _rustmap(self):
1711 self._rustmap = rustmod.DirstateMap(self._root)
1746 self._rustmap = rustmod.DirstateMap(self._root)
1712 self.read()
1747 self.read()
1713 return self._rustmap
1748 return self._rustmap
1714
1749
1715 @property
1750 @property
1716 def copymap(self):
1751 def copymap(self):
1717 return self._rustmap.copymap()
1752 return self._rustmap.copymap()
1718
1753
1719 def preload(self):
1754 def preload(self):
1720 self._rustmap
1755 self._rustmap
1721
1756
1722 def clear(self):
1757 def clear(self):
1723 self._rustmap.clear()
1758 self._rustmap.clear()
1724 self.setparents(nullid, nullid)
1759 self.setparents(nullid, nullid)
1725 util.clearcachedproperty(self, b"_dirs")
1760 util.clearcachedproperty(self, b"_dirs")
1726 util.clearcachedproperty(self, b"_alldirs")
1761 util.clearcachedproperty(self, b"_alldirs")
1727 util.clearcachedproperty(self, b"dirfoldmap")
1762 util.clearcachedproperty(self, b"dirfoldmap")
1728
1763
1729 def items(self):
1764 def items(self):
1730 return self._rustmap.items()
1765 return self._rustmap.items()
1731
1766
1732 def keys(self):
1767 def keys(self):
1733 return iter(self._rustmap)
1768 return iter(self._rustmap)
1734
1769
1735 def __contains__(self, key):
1770 def __contains__(self, key):
1736 return key in self._rustmap
1771 return key in self._rustmap
1737
1772
1738 def __getitem__(self, item):
1773 def __getitem__(self, item):
1739 return self._rustmap[item]
1774 return self._rustmap[item]
1740
1775
1741 def __len__(self):
1776 def __len__(self):
1742 return len(self._rustmap)
1777 return len(self._rustmap)
1743
1778
1744 def __iter__(self):
1779 def __iter__(self):
1745 return iter(self._rustmap)
1780 return iter(self._rustmap)
1746
1781
1747 # forward for python2,3 compat
1782 # forward for python2,3 compat
1748 iteritems = items
1783 iteritems = items
1749
1784
1750 def _opendirstatefile(self):
1785 def _opendirstatefile(self):
1751 fp, mode = txnutil.trypending(
1786 fp, mode = txnutil.trypending(
1752 self._root, self._opener, self._filename
1787 self._root, self._opener, self._filename
1753 )
1788 )
1754 if self._pendingmode is not None and self._pendingmode != mode:
1789 if self._pendingmode is not None and self._pendingmode != mode:
1755 fp.close()
1790 fp.close()
1756 raise error.Abort(
1791 raise error.Abort(
1757 _(b'working directory state may be changed parallelly')
1792 _(b'working directory state may be changed parallelly')
1758 )
1793 )
1759 self._pendingmode = mode
1794 self._pendingmode = mode
1760 return fp
1795 return fp
1761
1796
1762 def setparents(self, p1, p2):
1797 def setparents(self, p1, p2):
1763 self._rustmap.setparents(p1, p2)
1798 self._rustmap.setparents(p1, p2)
1764 self._parents = (p1, p2)
1799 self._parents = (p1, p2)
1765 self._dirtyparents = True
1800 self._dirtyparents = True
1766
1801
1767 def parents(self):
1802 def parents(self):
1768 if not self._parents:
1803 if not self._parents:
1769 try:
1804 try:
1770 fp = self._opendirstatefile()
1805 fp = self._opendirstatefile()
1771 st = fp.read(40)
1806 st = fp.read(40)
1772 fp.close()
1807 fp.close()
1773 except IOError as err:
1808 except IOError as err:
1774 if err.errno != errno.ENOENT:
1809 if err.errno != errno.ENOENT:
1775 raise
1810 raise
1776 # File doesn't exist, so the current state is empty
1811 # File doesn't exist, so the current state is empty
1777 st = b''
1812 st = b''
1778
1813
1779 try:
1814 try:
1780 self._parents = self._rustmap.parents(st)
1815 self._parents = self._rustmap.parents(st)
1781 except ValueError:
1816 except ValueError:
1782 raise error.Abort(
1817 raise error.Abort(
1783 _(b'working directory state appears damaged!')
1818 _(b'working directory state appears damaged!')
1784 )
1819 )
1785
1820
1786 return self._parents
1821 return self._parents
1787
1822
1788 def read(self):
1823 def read(self):
1789 # ignore HG_PENDING because identity is used only for writing
1824 # ignore HG_PENDING because identity is used only for writing
1790 self.identity = util.filestat.frompath(
1825 self.identity = util.filestat.frompath(
1791 self._opener.join(self._filename)
1826 self._opener.join(self._filename)
1792 )
1827 )
1793
1828
1794 try:
1829 try:
1795 fp = self._opendirstatefile()
1830 fp = self._opendirstatefile()
1796 try:
1831 try:
1797 st = fp.read()
1832 st = fp.read()
1798 finally:
1833 finally:
1799 fp.close()
1834 fp.close()
1800 except IOError as err:
1835 except IOError as err:
1801 if err.errno != errno.ENOENT:
1836 if err.errno != errno.ENOENT:
1802 raise
1837 raise
1803 return
1838 return
1804 if not st:
1839 if not st:
1805 return
1840 return
1806
1841
1807 parse_dirstate = util.nogc(self._rustmap.read)
1842 parse_dirstate = util.nogc(self._rustmap.read)
1808 parents = parse_dirstate(st)
1843 parents = parse_dirstate(st)
1809 if parents and not self._dirtyparents:
1844 if parents and not self._dirtyparents:
1810 self.setparents(*parents)
1845 self.setparents(*parents)
1811
1846
1812 self.__contains__ = self._rustmap.__contains__
1847 self.__contains__ = self._rustmap.__contains__
1813 self.__getitem__ = self._rustmap.__getitem__
1848 self.__getitem__ = self._rustmap.__getitem__
1814 self.get = self._rustmap.get
1849 self.get = self._rustmap.get
1815
1850
1816 def write(self, st, now):
1851 def write(self, st, now):
1817 parents = self.parents()
1852 parents = self.parents()
1818 st.write(self._rustmap.write(parents[0], parents[1], now))
1853 st.write(self._rustmap.write(parents[0], parents[1], now))
1819 st.close()
1854 st.close()
1820 self._dirtyparents = False
1855 self._dirtyparents = False
1821
1856
1822 @propertycache
1857 @propertycache
1823 def filefoldmap(self):
1858 def filefoldmap(self):
1824 """Returns a dictionary mapping normalized case paths to their
1859 """Returns a dictionary mapping normalized case paths to their
1825 non-normalized versions.
1860 non-normalized versions.
1826 """
1861 """
1827 return self._rustmap.filefoldmapasdict()
1862 return self._rustmap.filefoldmapasdict()
1828
1863
1829 def hastrackeddir(self, d):
1864 def hastrackeddir(self, d):
1830 self._dirs # Trigger Python's propertycache
1865 self._dirs # Trigger Python's propertycache
1831 return self._rustmap.hastrackeddir(d)
1866 return self._rustmap.hastrackeddir(d)
1832
1867
1833 def hasdir(self, d):
1868 def hasdir(self, d):
1834 self._dirs # Trigger Python's propertycache
1869 self._dirs # Trigger Python's propertycache
1835 return self._rustmap.hasdir(d)
1870 return self._rustmap.hasdir(d)
1836
1871
1837 @propertycache
1872 @propertycache
1838 def _dirs(self):
1873 def _dirs(self):
1839 return self._rustmap.getdirs()
1874 return self._rustmap.getdirs()
1840
1875
1841 @propertycache
1876 @propertycache
1842 def _alldirs(self):
1877 def _alldirs(self):
1843 return self._rustmap.getalldirs()
1878 return self._rustmap.getalldirs()
1844
1879
1845 @propertycache
1880 @propertycache
1846 def identity(self):
1881 def identity(self):
1847 self._rustmap
1882 self._rustmap
1848 return self.identity
1883 return self.identity
1849
1884
1850 @property
1885 @property
1851 def nonnormalset(self):
1886 def nonnormalset(self):
1852 nonnorm = self._rustmap.non_normal_entries()
1887 nonnorm = self._rustmap.non_normal_entries()
1853 return nonnorm
1888 return nonnorm
1854
1889
1855 @propertycache
1890 @propertycache
1856 def otherparentset(self):
1891 def otherparentset(self):
1857 otherparents = self._rustmap.other_parent_entries()
1892 otherparents = self._rustmap.other_parent_entries()
1858 return otherparents
1893 return otherparents
1859
1894
1860 @propertycache
1895 @propertycache
1861 def dirfoldmap(self):
1896 def dirfoldmap(self):
1862 f = {}
1897 f = {}
1863 normcase = util.normcase
1898 normcase = util.normcase
1864 for name in self._dirs:
1899 for name in self._dirs:
1865 f[normcase(name)] = name
1900 f[normcase(name)] = name
1866 return f
1901 return f
@@ -1,1601 +1,1604 b''
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
8 from __future__ import absolute_import, print_function
9
9
10 import copy
10 import copy
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import open
16 from .pycompat import open
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 pathutil,
20 pathutil,
21 policy,
21 policy,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 )
24 )
25 from .utils import stringutil
25 from .utils import stringutil
26
26
27 rustmod = policy.importrust('dirstate')
27 rustmod = policy.importrust('dirstate')
28
28
29 allpatternkinds = (
29 allpatternkinds = (
30 b're',
30 b're',
31 b'glob',
31 b'glob',
32 b'path',
32 b'path',
33 b'relglob',
33 b'relglob',
34 b'relpath',
34 b'relpath',
35 b'relre',
35 b'relre',
36 b'rootglob',
36 b'rootglob',
37 b'listfile',
37 b'listfile',
38 b'listfile0',
38 b'listfile0',
39 b'set',
39 b'set',
40 b'include',
40 b'include',
41 b'subinclude',
41 b'subinclude',
42 b'rootfilesin',
42 b'rootfilesin',
43 )
43 )
44 cwdrelativepatternkinds = (b'relpath', b'glob')
44 cwdrelativepatternkinds = (b'relpath', b'glob')
45
45
46 propertycache = util.propertycache
46 propertycache = util.propertycache
47
47
48
48
49 def _rematcher(regex):
49 def _rematcher(regex):
50 '''compile the regexp with the best available regexp engine and return a
50 '''compile the regexp with the best available regexp engine and return a
51 matcher function'''
51 matcher function'''
52 m = util.re.compile(regex)
52 m = util.re.compile(regex)
53 try:
53 try:
54 # slightly faster, provided by facebook's re2 bindings
54 # slightly faster, provided by facebook's re2 bindings
55 return m.test_match
55 return m.test_match
56 except AttributeError:
56 except AttributeError:
57 return m.match
57 return m.match
58
58
59
59
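_rematcher prefers the faster test_match method exposed by re2-style bindings and silently falls back to the standard library otherwise. A standalone sketch of that fallback pattern using only the stdlib re module (so the AttributeError branch is always taken here); rematcher is a local stand-in, not Mercurial's util.re wrapper:

    import re

    def rematcher(regex):
        # Compile once, prefer a faster .test_match if the engine provides one.
        m = re.compile(regex)
        try:
            return m.test_match   # re2-style bindings expose this
        except AttributeError:
            return m.match        # stdlib fallback

    matchfn = rematcher(rb'.*\.c$')
    assert matchfn(b'main.c') and not matchfn(b'main.py')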
60 def _expandsets(cwd, kindpats, ctx=None, listsubrepos=False, badfn=None):
60 def _expandsets(cwd, kindpats, ctx=None, listsubrepos=False, badfn=None):
61 '''Returns the kindpats list with the 'set' patterns expanded to matchers'''
61 '''Returns the kindpats list with the 'set' patterns expanded to matchers'''
62 matchers = []
62 matchers = []
63 other = []
63 other = []
64
64
65 for kind, pat, source in kindpats:
65 for kind, pat, source in kindpats:
66 if kind == b'set':
66 if kind == b'set':
67 if ctx is None:
67 if ctx is None:
68 raise error.ProgrammingError(
68 raise error.ProgrammingError(
69 b"fileset expression with no context"
69 b"fileset expression with no context"
70 )
70 )
71 matchers.append(ctx.matchfileset(cwd, pat, badfn=badfn))
71 matchers.append(ctx.matchfileset(cwd, pat, badfn=badfn))
72
72
73 if listsubrepos:
73 if listsubrepos:
74 for subpath in ctx.substate:
74 for subpath in ctx.substate:
75 sm = ctx.sub(subpath).matchfileset(cwd, pat, badfn=badfn)
75 sm = ctx.sub(subpath).matchfileset(cwd, pat, badfn=badfn)
76 pm = prefixdirmatcher(subpath, sm, badfn=badfn)
76 pm = prefixdirmatcher(subpath, sm, badfn=badfn)
77 matchers.append(pm)
77 matchers.append(pm)
78
78
79 continue
79 continue
80 other.append((kind, pat, source))
80 other.append((kind, pat, source))
81 return matchers, other
81 return matchers, other
82
82
83
83
84 def _expandsubinclude(kindpats, root):
84 def _expandsubinclude(kindpats, root):
85 '''Returns the list of subinclude matcher args and the kindpats without the
85 '''Returns the list of subinclude matcher args and the kindpats without the
86 subincludes in it.'''
86 subincludes in it.'''
87 relmatchers = []
87 relmatchers = []
88 other = []
88 other = []
89
89
90 for kind, pat, source in kindpats:
90 for kind, pat, source in kindpats:
91 if kind == b'subinclude':
91 if kind == b'subinclude':
92 sourceroot = pathutil.dirname(util.normpath(source))
92 sourceroot = pathutil.dirname(util.normpath(source))
93 pat = util.pconvert(pat)
93 pat = util.pconvert(pat)
94 path = pathutil.join(sourceroot, pat)
94 path = pathutil.join(sourceroot, pat)
95
95
96 newroot = pathutil.dirname(path)
96 newroot = pathutil.dirname(path)
97 matcherargs = (newroot, b'', [], [b'include:%s' % path])
97 matcherargs = (newroot, b'', [], [b'include:%s' % path])
98
98
99 prefix = pathutil.canonpath(root, root, newroot)
99 prefix = pathutil.canonpath(root, root, newroot)
100 if prefix:
100 if prefix:
101 prefix += b'/'
101 prefix += b'/'
102 relmatchers.append((prefix, matcherargs))
102 relmatchers.append((prefix, matcherargs))
103 else:
103 else:
104 other.append((kind, pat, source))
104 other.append((kind, pat, source))
105
105
106 return relmatchers, other
106 return relmatchers, other
107
107
108
108
109 def _kindpatsalwaysmatch(kindpats):
109 def _kindpatsalwaysmatch(kindpats):
110 """"Checks whether the kindspats match everything, as e.g.
110 """"Checks whether the kindspats match everything, as e.g.
111 'relpath:.' does.
111 'relpath:.' does.
112 """
112 """
113 for kind, pat, source in kindpats:
113 for kind, pat, source in kindpats:
114 if pat != b'' or kind not in [b'relpath', b'glob']:
114 if pat != b'' or kind not in [b'relpath', b'glob']:
115 return False
115 return False
116 return True
116 return True
117
117
118
118
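In other words, a pattern list collapses to "match everything" only when every normalized entry is an empty relpath or glob, which is what e.g. 'relpath:.' becomes. A tiny standalone restatement of that check; kindpats_always_match is illustrative, not the function above:

    def kindpats_always_match(kindpats):
        # True only if every entry is an empty 'relpath' or 'glob' pattern.
        return all(
            pat == b'' and kind in (b'relpath', b'glob')
            for kind, pat, source in kindpats
        )

    assert kindpats_always_match([(b'relpath', b'', b'')])
    assert not kindpats_always_match([(b'glob', b'*.c', b'')])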
119 def _buildkindpatsmatcher(
119 def _buildkindpatsmatcher(
120 matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
120 matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
121 ):
121 ):
122 matchers = []
122 matchers = []
123 fms, kindpats = _expandsets(
123 fms, kindpats = _expandsets(
124 cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
124 cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
125 )
125 )
126 if kindpats:
126 if kindpats:
127 m = matchercls(root, kindpats, badfn=badfn)
127 m = matchercls(root, kindpats, badfn=badfn)
128 matchers.append(m)
128 matchers.append(m)
129 if fms:
129 if fms:
130 matchers.extend(fms)
130 matchers.extend(fms)
131 if not matchers:
131 if not matchers:
132 return nevermatcher(badfn=badfn)
132 return nevermatcher(badfn=badfn)
133 if len(matchers) == 1:
133 if len(matchers) == 1:
134 return matchers[0]
134 return matchers[0]
135 return unionmatcher(matchers)
135 return unionmatcher(matchers)
136
136
137
137
138 def match(
138 def match(
139 root,
139 root,
140 cwd,
140 cwd,
141 patterns=None,
141 patterns=None,
142 include=None,
142 include=None,
143 exclude=None,
143 exclude=None,
144 default=b'glob',
144 default=b'glob',
145 auditor=None,
145 auditor=None,
146 ctx=None,
146 ctx=None,
147 listsubrepos=False,
147 listsubrepos=False,
148 warn=None,
148 warn=None,
149 badfn=None,
149 badfn=None,
150 icasefs=False,
150 icasefs=False,
151 ):
151 ):
152 r"""build an object to match a set of file patterns
152 r"""build an object to match a set of file patterns
153
153
154 arguments:
154 arguments:
155 root - the canonical root of the tree you're matching against
155 root - the canonical root of the tree you're matching against
156 cwd - the current working directory, if relevant
156 cwd - the current working directory, if relevant
157 patterns - patterns to find
157 patterns - patterns to find
158 include - patterns to include (unless they are excluded)
158 include - patterns to include (unless they are excluded)
159 exclude - patterns to exclude (even if they are included)
159 exclude - patterns to exclude (even if they are included)
160 default - if a pattern in patterns has no explicit type, assume this one
160 default - if a pattern in patterns has no explicit type, assume this one
161 auditor - optional path auditor
161 auditor - optional path auditor
162 ctx - optional changecontext
162 ctx - optional changecontext
163 listsubrepos - if True, recurse into subrepositories
163 listsubrepos - if True, recurse into subrepositories
164 warn - optional function used for printing warnings
164 warn - optional function used for printing warnings
165 badfn - optional bad() callback for this matcher instead of the default
165 badfn - optional bad() callback for this matcher instead of the default
166 icasefs - make a matcher for wdir on case insensitive filesystems, which
166 icasefs - make a matcher for wdir on case insensitive filesystems, which
167 normalizes the given patterns to the case in the filesystem
167 normalizes the given patterns to the case in the filesystem
168
168
169 a pattern is one of:
169 a pattern is one of:
170 'glob:<glob>' - a glob relative to cwd
170 'glob:<glob>' - a glob relative to cwd
171 're:<regexp>' - a regular expression
171 're:<regexp>' - a regular expression
172 'path:<path>' - a path relative to repository root, which is matched
172 'path:<path>' - a path relative to repository root, which is matched
173 recursively
173 recursively
174 'rootfilesin:<path>' - a path relative to repository root, which is
174 'rootfilesin:<path>' - a path relative to repository root, which is
175 matched non-recursively (will not match subdirectories)
175 matched non-recursively (will not match subdirectories)
176 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
176 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
177 'relpath:<path>' - a path relative to cwd
177 'relpath:<path>' - a path relative to cwd
178 'relre:<regexp>' - a regexp that needn't match the start of a name
178 'relre:<regexp>' - a regexp that needn't match the start of a name
179 'set:<fileset>' - a fileset expression
179 'set:<fileset>' - a fileset expression
180 'include:<path>' - a file of patterns to read and include
180 'include:<path>' - a file of patterns to read and include
181 'subinclude:<path>' - a file of patterns to match against files under
181 'subinclude:<path>' - a file of patterns to match against files under
182 the same directory
182 the same directory
183 '<something>' - a pattern of the specified default type
183 '<something>' - a pattern of the specified default type
184
184
185 >>> def _match(root, *args, **kwargs):
185 >>> def _match(root, *args, **kwargs):
186 ... return match(util.localpath(root), *args, **kwargs)
186 ... return match(util.localpath(root), *args, **kwargs)
187
187
188 Usually a patternmatcher is returned:
188 Usually a patternmatcher is returned:
189 >>> _match(b'/foo', b'.', [b're:.*\.c$', b'path:foo/a', b'*.py'])
189 >>> _match(b'/foo', b'.', [b're:.*\.c$', b'path:foo/a', b'*.py'])
190 <patternmatcher patterns='.*\\.c$|foo/a(?:/|$)|[^/]*\\.py$'>
190 <patternmatcher patterns='.*\\.c$|foo/a(?:/|$)|[^/]*\\.py$'>
191
191
192 Combining 'patterns' with 'include' (resp. 'exclude') gives an
192 Combining 'patterns' with 'include' (resp. 'exclude') gives an
193 intersectionmatcher (resp. a differencematcher):
193 intersectionmatcher (resp. a differencematcher):
194 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], include=[b'path:lib']))
194 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], include=[b'path:lib']))
195 <class 'mercurial.match.intersectionmatcher'>
195 <class 'mercurial.match.intersectionmatcher'>
196 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], exclude=[b'path:build']))
196 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], exclude=[b'path:build']))
197 <class 'mercurial.match.differencematcher'>
197 <class 'mercurial.match.differencematcher'>
198
198
199 Notice that, if 'patterns' is empty, an alwaysmatcher is returned:
199 Notice that, if 'patterns' is empty, an alwaysmatcher is returned:
200 >>> _match(b'/foo', b'.', [])
200 >>> _match(b'/foo', b'.', [])
201 <alwaysmatcher>
201 <alwaysmatcher>
202
202
203 The 'default' argument determines which kind of pattern is assumed if a
203 The 'default' argument determines which kind of pattern is assumed if a
204 pattern has no prefix:
204 pattern has no prefix:
205 >>> _match(b'/foo', b'.', [b'.*\.c$'], default=b're')
205 >>> _match(b'/foo', b'.', [b'.*\.c$'], default=b're')
206 <patternmatcher patterns='.*\\.c$'>
206 <patternmatcher patterns='.*\\.c$'>
207 >>> _match(b'/foo', b'.', [b'main.py'], default=b'relpath')
207 >>> _match(b'/foo', b'.', [b'main.py'], default=b'relpath')
208 <patternmatcher patterns='main\\.py(?:/|$)'>
208 <patternmatcher patterns='main\\.py(?:/|$)'>
209 >>> _match(b'/foo', b'.', [b'main.py'], default=b're')
209 >>> _match(b'/foo', b'.', [b'main.py'], default=b're')
210 <patternmatcher patterns='main.py'>
210 <patternmatcher patterns='main.py'>
211
211
212 The primary use of matchers is to check whether a value (usually a file
212 The primary use of matchers is to check whether a value (usually a file
213 name) matches againset one of the patterns given at initialization. There
213 name) matches againset one of the patterns given at initialization. There
214 are two ways of doing this check.
214 are two ways of doing this check.
215
215
216 >>> m = _match(b'/foo', b'', [b're:.*\.c$', b'relpath:a'])
216 >>> m = _match(b'/foo', b'', [b're:.*\.c$', b'relpath:a'])
217
217
218 1. Calling the matcher with a file name returns True if any pattern
218 1. Calling the matcher with a file name returns True if any pattern
219 matches that file name:
219 matches that file name:
220 >>> m(b'a')
220 >>> m(b'a')
221 True
221 True
222 >>> m(b'main.c')
222 >>> m(b'main.c')
223 True
223 True
224 >>> m(b'test.py')
224 >>> m(b'test.py')
225 False
225 False
226
226
227 2. Using the exact() method only returns True if the file name matches one
227 2. Using the exact() method only returns True if the file name matches one
228 of the exact patterns (i.e. not re: or glob: patterns):
228 of the exact patterns (i.e. not re: or glob: patterns):
229 >>> m.exact(b'a')
229 >>> m.exact(b'a')
230 True
230 True
231 >>> m.exact(b'main.c')
231 >>> m.exact(b'main.c')
232 False
232 False
233 """
233 """
234 assert os.path.isabs(root)
234 assert os.path.isabs(root)
235 cwd = os.path.join(root, util.localpath(cwd))
235 cwd = os.path.join(root, util.localpath(cwd))
236 normalize = _donormalize
236 normalize = _donormalize
237 if icasefs:
237 if icasefs:
238 dirstate = ctx.repo().dirstate
238 dirstate = ctx.repo().dirstate
239 dsnormalize = dirstate.normalize
239 dsnormalize = dirstate.normalize
240
240
241 def normalize(patterns, default, root, cwd, auditor, warn):
241 def normalize(patterns, default, root, cwd, auditor, warn):
242 kp = _donormalize(patterns, default, root, cwd, auditor, warn)
242 kp = _donormalize(patterns, default, root, cwd, auditor, warn)
243 kindpats = []
243 kindpats = []
244 for kind, pats, source in kp:
244 for kind, pats, source in kp:
245 if kind not in (b're', b'relre'): # regex can't be normalized
245 if kind not in (b're', b'relre'): # regex can't be normalized
246 p = pats
246 p = pats
247 pats = dsnormalize(pats)
247 pats = dsnormalize(pats)
248
248
249 # Preserve the original to handle a case-only rename.
249 # Preserve the original to handle a case-only rename.
250 if p != pats and p in dirstate:
250 if p != pats and p in dirstate:
251 kindpats.append((kind, p, source))
251 kindpats.append((kind, p, source))
252
252
253 kindpats.append((kind, pats, source))
253 kindpats.append((kind, pats, source))
254 return kindpats
254 return kindpats
255
255
256 if patterns:
256 if patterns:
257 kindpats = normalize(patterns, default, root, cwd, auditor, warn)
257 kindpats = normalize(patterns, default, root, cwd, auditor, warn)
258 if _kindpatsalwaysmatch(kindpats):
258 if _kindpatsalwaysmatch(kindpats):
259 m = alwaysmatcher(badfn)
259 m = alwaysmatcher(badfn)
260 else:
260 else:
261 m = _buildkindpatsmatcher(
261 m = _buildkindpatsmatcher(
262 patternmatcher,
262 patternmatcher,
263 root,
263 root,
264 cwd,
264 cwd,
265 kindpats,
265 kindpats,
266 ctx=ctx,
266 ctx=ctx,
267 listsubrepos=listsubrepos,
267 listsubrepos=listsubrepos,
268 badfn=badfn,
268 badfn=badfn,
269 )
269 )
270 else:
270 else:
271 # It's a little strange that no patterns means to match everything.
271 # It's a little strange that no patterns means to match everything.
272 # Consider changing this to match nothing (probably using nevermatcher).
272 # Consider changing this to match nothing (probably using nevermatcher).
273 m = alwaysmatcher(badfn)
273 m = alwaysmatcher(badfn)
274
274
275 if include:
275 if include:
276 kindpats = normalize(include, b'glob', root, cwd, auditor, warn)
276 kindpats = normalize(include, b'glob', root, cwd, auditor, warn)
277 im = _buildkindpatsmatcher(
277 im = _buildkindpatsmatcher(
278 includematcher,
278 includematcher,
279 root,
279 root,
280 cwd,
280 cwd,
281 kindpats,
281 kindpats,
282 ctx=ctx,
282 ctx=ctx,
283 listsubrepos=listsubrepos,
283 listsubrepos=listsubrepos,
284 badfn=None,
284 badfn=None,
285 )
285 )
286 m = intersectmatchers(m, im)
286 m = intersectmatchers(m, im)
287 if exclude:
287 if exclude:
288 kindpats = normalize(exclude, b'glob', root, cwd, auditor, warn)
288 kindpats = normalize(exclude, b'glob', root, cwd, auditor, warn)
289 em = _buildkindpatsmatcher(
289 em = _buildkindpatsmatcher(
290 includematcher,
290 includematcher,
291 root,
291 root,
292 cwd,
292 cwd,
293 kindpats,
293 kindpats,
294 ctx=ctx,
294 ctx=ctx,
295 listsubrepos=listsubrepos,
295 listsubrepos=listsubrepos,
296 badfn=None,
296 badfn=None,
297 )
297 )
298 m = differencematcher(m, em)
298 m = differencematcher(m, em)
299 return m
299 return m
300
300
301
301
302 def exact(files, badfn=None):
302 def exact(files, badfn=None):
303 return exactmatcher(files, badfn=badfn)
303 return exactmatcher(files, badfn=badfn)
304
304
305
305
306 def always(badfn=None):
306 def always(badfn=None):
307 return alwaysmatcher(badfn)
307 return alwaysmatcher(badfn)
308
308
309
309
310 def never(badfn=None):
310 def never(badfn=None):
311 return nevermatcher(badfn)
311 return nevermatcher(badfn)
312
312
313
313
314 def badmatch(match, badfn):
314 def badmatch(match, badfn):
315 """Make a copy of the given matcher, replacing its bad method with the given
315 """Make a copy of the given matcher, replacing its bad method with the given
316 one.
316 one.
317 """
317 """
318 m = copy.copy(match)
318 m = copy.copy(match)
319 m.bad = badfn
319 m.bad = badfn
320 return m
320 return m
321
321
322
322
323 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
323 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
324 '''Convert 'kind:pat' from the patterns list to tuples with kind and
324 '''Convert 'kind:pat' from the patterns list to tuples with kind and
325 normalized and rooted patterns and with listfiles expanded.'''
325 normalized and rooted patterns and with listfiles expanded.'''
326 kindpats = []
326 kindpats = []
327 for kind, pat in [_patsplit(p, default) for p in patterns]:
327 for kind, pat in [_patsplit(p, default) for p in patterns]:
328 if kind in cwdrelativepatternkinds:
328 if kind in cwdrelativepatternkinds:
329 pat = pathutil.canonpath(root, cwd, pat, auditor=auditor)
329 pat = pathutil.canonpath(root, cwd, pat, auditor=auditor)
330 elif kind in (b'relglob', b'path', b'rootfilesin', b'rootglob'):
330 elif kind in (b'relglob', b'path', b'rootfilesin', b'rootglob'):
331 pat = util.normpath(pat)
331 pat = util.normpath(pat)
332 elif kind in (b'listfile', b'listfile0'):
332 elif kind in (b'listfile', b'listfile0'):
333 try:
333 try:
334 files = util.readfile(pat)
334 files = util.readfile(pat)
335 if kind == b'listfile0':
335 if kind == b'listfile0':
336 files = files.split(b'\0')
336 files = files.split(b'\0')
337 else:
337 else:
338 files = files.splitlines()
338 files = files.splitlines()
339 files = [f for f in files if f]
339 files = [f for f in files if f]
340 except EnvironmentError:
340 except EnvironmentError:
341 raise error.Abort(_(b"unable to read file list (%s)") % pat)
341 raise error.Abort(_(b"unable to read file list (%s)") % pat)
342 for k, p, source in _donormalize(
342 for k, p, source in _donormalize(
343 files, default, root, cwd, auditor, warn
343 files, default, root, cwd, auditor, warn
344 ):
344 ):
345 kindpats.append((k, p, pat))
345 kindpats.append((k, p, pat))
346 continue
346 continue
347 elif kind == b'include':
347 elif kind == b'include':
348 try:
348 try:
349 fullpath = os.path.join(root, util.localpath(pat))
349 fullpath = os.path.join(root, util.localpath(pat))
350 includepats = readpatternfile(fullpath, warn)
350 includepats = readpatternfile(fullpath, warn)
351 for k, p, source in _donormalize(
351 for k, p, source in _donormalize(
352 includepats, default, root, cwd, auditor, warn
352 includepats, default, root, cwd, auditor, warn
353 ):
353 ):
354 kindpats.append((k, p, source or pat))
354 kindpats.append((k, p, source or pat))
355 except error.Abort as inst:
355 except error.Abort as inst:
356 raise error.Abort(
356 raise error.Abort(
357 b'%s: %s'
357 b'%s: %s'
358 % (pat, inst[0]) # pytype: disable=unsupported-operands
358 % (pat, inst[0]) # pytype: disable=unsupported-operands
359 )
359 )
360 except IOError as inst:
360 except IOError as inst:
361 if warn:
361 if warn:
362 warn(
362 warn(
363 _(b"skipping unreadable pattern file '%s': %s\n")
363 _(b"skipping unreadable pattern file '%s': %s\n")
364 % (pat, stringutil.forcebytestr(inst.strerror))
364 % (pat, stringutil.forcebytestr(inst.strerror))
365 )
365 )
366 continue
366 continue
367 # else: re or relre - which cannot be normalized
367 # else: re or relre - which cannot be normalized
368 kindpats.append((kind, pat, b''))
368 kindpats.append((kind, pat, b''))
369 return kindpats
369 return kindpats
370
370
371
371
372 class basematcher(object):
372 class basematcher(object):
373 def __init__(self, badfn=None):
373 def __init__(self, badfn=None):
374 if badfn is not None:
374 if badfn is not None:
375 self.bad = badfn
375 self.bad = badfn
376
376
377 def __call__(self, fn):
377 def __call__(self, fn):
378 return self.matchfn(fn)
378 return self.matchfn(fn)
379
379
380 # Callbacks related to how the matcher is used by dirstate.walk.
380 # Callbacks related to how the matcher is used by dirstate.walk.
381 # Subscribers to these events must monkeypatch the matcher object.
381 # Subscribers to these events must monkeypatch the matcher object.
382 def bad(self, f, msg):
382 def bad(self, f, msg):
383 '''Callback from dirstate.walk for each explicit file that can't be
383 '''Callback from dirstate.walk for each explicit file that can't be
384 found/accessed, with an error message.'''
384 found/accessed, with an error message.'''
385
385
386 # If traversedir is set, it will be called when a directory discovered
386 # If traversedir is set, it will be called when a directory discovered
387 # by recursive traversal is visited.
387 # by recursive traversal is visited.
388 traversedir = None
388 traversedir = None
389
389
390 @propertycache
390 @propertycache
391 def _files(self):
391 def _files(self):
392 return []
392 return []
393
393
394 def files(self):
394 def files(self):
395 '''Explicitly listed files or patterns or roots:
395 '''Explicitly listed files or patterns or roots:
396 if no patterns or .always(): empty list,
396 if no patterns or .always(): empty list,
397 if exact: list exact files,
397 if exact: list exact files,
398 if not .anypats(): list all files and dirs,
398 if not .anypats(): list all files and dirs,
399 else: optimal roots'''
399 else: optimal roots'''
400 return self._files
400 return self._files
401
401
402 @propertycache
402 @propertycache
403 def _fileset(self):
403 def _fileset(self):
404 return set(self._files)
404 return set(self._files)
405
405
406 def exact(self, f):
406 def exact(self, f):
407 '''Returns True if f is in .files().'''
407 '''Returns True if f is in .files().'''
408 return f in self._fileset
408 return f in self._fileset
409
409
410 def matchfn(self, f):
410 def matchfn(self, f):
411 return False
411 return False
412
412
413 def visitdir(self, dir):
413 def visitdir(self, dir):
414 '''Decides whether a directory should be visited based on whether it
414 '''Decides whether a directory should be visited based on whether it
415 has potential matches in it or one of its subdirectories. This is
415 has potential matches in it or one of its subdirectories. This is
416 based on the match's primary, included, and excluded patterns.
416 based on the match's primary, included, and excluded patterns.
417
417
418 Returns the string 'all' if the given directory and all subdirectories
418 Returns the string 'all' if the given directory and all subdirectories
419 should be visited. Otherwise returns True or False indicating whether
419 should be visited. Otherwise returns True or False indicating whether
420 the given directory should be visited.
420 the given directory should be visited.
421 '''
421 '''
422 return True
422 return True
423
423
424 def visitchildrenset(self, dir):
424 def visitchildrenset(self, dir):
425 '''Decides whether a directory should be visited based on whether it
425 '''Decides whether a directory should be visited based on whether it
426 has potential matches in it or one of its subdirectories, and
426 has potential matches in it or one of its subdirectories, and
427 potentially lists which subdirectories of that directory should be
427 potentially lists which subdirectories of that directory should be
428 visited. This is based on the match's primary, included, and excluded
428 visited. This is based on the match's primary, included, and excluded
429 patterns.
429 patterns.
430
430
431 This function is very similar to 'visitdir', and the following mapping
431 This function is very similar to 'visitdir', and the following mapping
432 can be applied:
432 can be applied:
433
433
434 visitdir | visitchildrenset
434 visitdir | visitchildrenset
435 ----------+-------------------
435 ----------+-------------------
436 False | set()
436 False | set()
437 'all' | 'all'
437 'all' | 'all'
438 True | 'this' OR non-empty set of subdirs -or files- to visit
438 True | 'this' OR non-empty set of subdirs -or files- to visit
439
439
440 Example:
440 Example:
441 Assume matchers ['path:foo/bar', 'rootfilesin:qux'], we would return
441 Assume matchers ['path:foo/bar', 'rootfilesin:qux'], we would return
442 the following values (assuming the implementation of visitchildrenset
442 the following values (assuming the implementation of visitchildrenset
443 is capable of recognizing this; some implementations are not).
443 is capable of recognizing this; some implementations are not).
444
444
445 '' -> {'foo', 'qux'}
445 '' -> {'foo', 'qux'}
446 'baz' -> set()
446 'baz' -> set()
447 'foo' -> {'bar'}
447 'foo' -> {'bar'}
448 # Ideally this would be 'all', but since the prefix nature of matchers
448 # Ideally this would be 'all', but since the prefix nature of matchers
449 # is applied to the entire matcher, we have to downgrade this to
449 # is applied to the entire matcher, we have to downgrade this to
450 # 'this' due to the non-prefix 'rootfilesin'-kind matcher being mixed
450 # 'this' due to the non-prefix 'rootfilesin'-kind matcher being mixed
451 # in.
451 # in.
452 'foo/bar' -> 'this'
452 'foo/bar' -> 'this'
453 'qux' -> 'this'
453 'qux' -> 'this'
454
454
455 Important:
455 Important:
456 Most matchers do not know if they're representing files or
456 Most matchers do not know if they're representing files or
457 directories. They see ['path:dir/f'] and don't know whether 'f' is a
457 directories. They see ['path:dir/f'] and don't know whether 'f' is a
458 file or a directory, so visitchildrenset('dir') for most matchers will
458 file or a directory, so visitchildrenset('dir') for most matchers will
459 return {'f'}, but if the matcher knows it's a file (like exactmatcher
459 return {'f'}, but if the matcher knows it's a file (like exactmatcher
460 does), it may return 'this'. Do not rely on the return being a set
460 does), it may return 'this'. Do not rely on the return being a set
461 indicating that there are no files in this dir to investigate (or
461 indicating that there are no files in this dir to investigate (or
462 equivalently that if there are files to investigate in 'dir' that it
462 equivalently that if there are files to investigate in 'dir' that it
463 will always return 'this').
463 will always return 'this').
464 '''
464 '''
465 return b'this'
465 return b'this'
466
466
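The point of the visitdir/visitchildrenset protocol is to let a directory walker prune whole subtrees before listing them. A rough sketch of how a walker could consume visitchildrenset; the nested-dict tree, the toy matcher, and the walk helper are hypothetical and far simpler than dirstate.walk:

    def walk(tree, matcher, base=b''):
        """Yield matching file paths from a nested dict {name: subtree or None}."""
        children = matcher.visitchildrenset(base)
        if not children:              # set() -> prune this whole subtree
            return
        for name, sub in tree.items():
            if children not in (b'all', b'this') and name not in children:
                continue              # the matcher is not interested in this child
            path = name if not base else base + b'/' + name
            if sub is None:           # a file: ask the matcher directly
                if matcher(path):
                    yield path
            else:                     # a directory: recurse
                for p in walk(sub, matcher, path):
                    yield p

    class toymatcher(object):
        # Matches everything under b'foo' and nothing else.
        def __call__(self, f):
            return f == b'foo' or f.startswith(b'foo/')
        def visitchildrenset(self, dir):
            if dir == b'':
                return {b'foo'}
            return b'all' if dir == b'foo' or dir.startswith(b'foo/') else set()

    tree = {b'foo': {b'bar.txt': None}, b'baz': {b'x.txt': None}}
    assert list(walk(tree, toymatcher())) == [b'foo/bar.txt']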
467 def always(self):
467 def always(self):
468 '''Matcher will match everything and .files() will be empty --
468 '''Matcher will match everything and .files() will be empty --
469 optimization might be possible.'''
469 optimization might be possible.'''
470 return False
470 return False
471
471
472 def isexact(self):
472 def isexact(self):
473 '''Matcher will match exactly the list of files in .files() --
473 '''Matcher will match exactly the list of files in .files() --
474 optimization might be possible.'''
474 optimization might be possible.'''
475 return False
475 return False
476
476
477 def prefix(self):
477 def prefix(self):
478 '''Matcher will match the paths in .files() recursively --
478 '''Matcher will match the paths in .files() recursively --
479 optimization might be possible.'''
479 optimization might be possible.'''
480 return False
480 return False
481
481
482 def anypats(self):
482 def anypats(self):
483 '''None of .always(), .isexact(), and .prefix() is true --
483 '''None of .always(), .isexact(), and .prefix() is true --
484 optimizations will be difficult.'''
484 optimizations will be difficult.'''
485 return not self.always() and not self.isexact() and not self.prefix()
485 return not self.always() and not self.isexact() and not self.prefix()
486
486
487
487
488 class alwaysmatcher(basematcher):
488 class alwaysmatcher(basematcher):
489 '''Matches everything.'''
489 '''Matches everything.'''
490
490
491 def __init__(self, badfn=None):
491 def __init__(self, badfn=None):
492 super(alwaysmatcher, self).__init__(badfn)
492 super(alwaysmatcher, self).__init__(badfn)
493
493
494 def always(self):
494 def always(self):
495 return True
495 return True
496
496
497 def matchfn(self, f):
497 def matchfn(self, f):
498 return True
498 return True
499
499
500 def visitdir(self, dir):
500 def visitdir(self, dir):
501 return b'all'
501 return b'all'
502
502
503 def visitchildrenset(self, dir):
503 def visitchildrenset(self, dir):
504 return b'all'
504 return b'all'
505
505
506 def __repr__(self):
506 def __repr__(self):
507 return r'<alwaysmatcher>'
507 return r'<alwaysmatcher>'
508
508
509
509
510 class nevermatcher(basematcher):
510 class nevermatcher(basematcher):
511 '''Matches nothing.'''
511 '''Matches nothing.'''
512
512
513 def __init__(self, badfn=None):
513 def __init__(self, badfn=None):
514 super(nevermatcher, self).__init__(badfn)
514 super(nevermatcher, self).__init__(badfn)
515
515
516 # It's a little weird to say that the nevermatcher is an exact matcher
516 # It's a little weird to say that the nevermatcher is an exact matcher
517 # or a prefix matcher, but it seems to make sense to let callers take
517 # or a prefix matcher, but it seems to make sense to let callers take
518 # fast paths based on either. There will be no exact matches, nor any
518 # fast paths based on either. There will be no exact matches, nor any
519 # prefixes (files() returns []), so fast paths iterating over them should
519 # prefixes (files() returns []), so fast paths iterating over them should
520 # be efficient (and correct).
520 # be efficient (and correct).
521 def isexact(self):
521 def isexact(self):
522 return True
522 return True
523
523
524 def prefix(self):
524 def prefix(self):
525 return True
525 return True
526
526
527 def visitdir(self, dir):
527 def visitdir(self, dir):
528 return False
528 return False
529
529
530 def visitchildrenset(self, dir):
530 def visitchildrenset(self, dir):
531 return set()
531 return set()
532
532
533 def __repr__(self):
533 def __repr__(self):
534 return r'<nevermatcher>'
534 return r'<nevermatcher>'
535
535
536
536
537 class predicatematcher(basematcher):
537 class predicatematcher(basematcher):
538 """A matcher adapter for a simple boolean function"""
538 """A matcher adapter for a simple boolean function"""
539
539
540 def __init__(self, predfn, predrepr=None, badfn=None):
540 def __init__(self, predfn, predrepr=None, badfn=None):
541 super(predicatematcher, self).__init__(badfn)
541 super(predicatematcher, self).__init__(badfn)
542 self.matchfn = predfn
542 self.matchfn = predfn
543 self._predrepr = predrepr
543 self._predrepr = predrepr
544
544
545 @encoding.strmethod
545 @encoding.strmethod
546 def __repr__(self):
546 def __repr__(self):
547 s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
547 s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
548 self.matchfn
548 self.matchfn
549 )
549 )
550 return b'<predicatenmatcher pred=%s>' % s
550 return b'<predicatenmatcher pred=%s>' % s
551
551
552
552
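predicatematcher makes it easy to wrap an ad-hoc boolean function as a matcher. A minimal usage sketch, assuming a Mercurial source tree or installation is importable; the predicate itself is made up for illustration:

    from mercurial import match as matchmod

    # matchfn is simply the wrapped predicate.
    is_python = matchmod.predicatematcher(lambda f: f.endswith(b'.py'))
    assert is_python(b'setup.py')
    assert not is_python(b'README')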
553 class patternmatcher(basematcher):
553 class patternmatcher(basematcher):
554 r"""Matches a set of (kind, pat, source) against a 'root' directory.
554 r"""Matches a set of (kind, pat, source) against a 'root' directory.
555
555
556 >>> kindpats = [
556 >>> kindpats = [
557 ... (b're', br'.*\.c$', b''),
557 ... (b're', br'.*\.c$', b''),
558 ... (b'path', b'foo/a', b''),
558 ... (b'path', b'foo/a', b''),
559 ... (b'relpath', b'b', b''),
559 ... (b'relpath', b'b', b''),
560 ... (b'glob', b'*.h', b''),
560 ... (b'glob', b'*.h', b''),
561 ... ]
561 ... ]
562 >>> m = patternmatcher(b'foo', kindpats)
562 >>> m = patternmatcher(b'foo', kindpats)
563 >>> m(b'main.c') # matches re:.*\.c$
563 >>> m(b'main.c') # matches re:.*\.c$
564 True
564 True
565 >>> m(b'b.txt')
565 >>> m(b'b.txt')
566 False
566 False
567 >>> m(b'foo/a') # matches path:foo/a
567 >>> m(b'foo/a') # matches path:foo/a
568 True
568 True
569 >>> m(b'a') # does not match path:b, since 'root' is 'foo'
569 >>> m(b'a') # does not match path:b, since 'root' is 'foo'
570 False
570 False
571 >>> m(b'b') # matches relpath:b, since 'root' is 'foo'
571 >>> m(b'b') # matches relpath:b, since 'root' is 'foo'
572 True
572 True
573 >>> m(b'lib.h') # matches glob:*.h
573 >>> m(b'lib.h') # matches glob:*.h
574 True
574 True
575
575
576 >>> m.files()
576 >>> m.files()
577 ['', 'foo/a', 'b', '']
577 ['', 'foo/a', 'b', '']
578 >>> m.exact(b'foo/a')
578 >>> m.exact(b'foo/a')
579 True
579 True
580 >>> m.exact(b'b')
580 >>> m.exact(b'b')
581 True
581 True
582 >>> m.exact(b'lib.h') # exact matches are for (rel)path kinds
582 >>> m.exact(b'lib.h') # exact matches are for (rel)path kinds
583 False
583 False
584 """
584 """
585
585
586 def __init__(self, root, kindpats, badfn=None):
586 def __init__(self, root, kindpats, badfn=None):
587 super(patternmatcher, self).__init__(badfn)
587 super(patternmatcher, self).__init__(badfn)
588
588
589 self._files = _explicitfiles(kindpats)
589 self._files = _explicitfiles(kindpats)
590 self._prefix = _prefix(kindpats)
590 self._prefix = _prefix(kindpats)
591 self._pats, self.matchfn = _buildmatch(kindpats, b'$', root)
591 self._pats, self.matchfn = _buildmatch(kindpats, b'$', root)
592
592
593 @propertycache
593 @propertycache
594 def _dirs(self):
594 def _dirs(self):
595 return set(pathutil.dirs(self._fileset))
595 return set(pathutil.dirs(self._fileset))
596
596
597 def visitdir(self, dir):
597 def visitdir(self, dir):
598 if self._prefix and dir in self._fileset:
598 if self._prefix and dir in self._fileset:
599 return b'all'
599 return b'all'
600 return (
600 return (
601 dir in self._fileset
601 dir in self._fileset
602 or dir in self._dirs
602 or dir in self._dirs
603 or any(
603 or any(
604 parentdir in self._fileset
604 parentdir in self._fileset
605 for parentdir in pathutil.finddirs(dir)
605 for parentdir in pathutil.finddirs(dir)
606 )
606 )
607 )
607 )
608
608
609 def visitchildrenset(self, dir):
609 def visitchildrenset(self, dir):
610 ret = self.visitdir(dir)
610 ret = self.visitdir(dir)
611 if ret is True:
611 if ret is True:
612 return b'this'
612 return b'this'
613 elif not ret:
613 elif not ret:
614 return set()
614 return set()
615 assert ret == b'all'
615 assert ret == b'all'
616 return b'all'
616 return b'all'
617
617
618 def prefix(self):
618 def prefix(self):
619 return self._prefix
619 return self._prefix
620
620
621 @encoding.strmethod
621 @encoding.strmethod
622 def __repr__(self):
622 def __repr__(self):
623 return b'<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
623 return b'<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
624
624
625
625
626 # This is basically a reimplementation of pathutil.dirs that stores the
626 # This is basically a reimplementation of pathutil.dirs that stores the
627 # children instead of just a count of them, plus a small optional optimization
627 # children instead of just a count of them, plus a small optional optimization
628 # to avoid some directories we don't need.
628 # to avoid some directories we don't need.
629 class _dirchildren(object):
629 class _dirchildren(object):
630 def __init__(self, paths, onlyinclude=None):
630 def __init__(self, paths, onlyinclude=None):
631 self._dirs = {}
631 self._dirs = {}
632 self._onlyinclude = onlyinclude or []
632 self._onlyinclude = onlyinclude or []
633 addpath = self.addpath
633 addpath = self.addpath
634 for f in paths:
634 for f in paths:
635 addpath(f)
635 addpath(f)
636
636
637 def addpath(self, path):
637 def addpath(self, path):
638 if path == b'':
638 if path == b'':
639 return
639 return
640 dirs = self._dirs
640 dirs = self._dirs
641 findsplitdirs = _dirchildren._findsplitdirs
641 findsplitdirs = _dirchildren._findsplitdirs
642 for d, b in findsplitdirs(path):
642 for d, b in findsplitdirs(path):
643 if d not in self._onlyinclude:
643 if d not in self._onlyinclude:
644 continue
644 continue
645 dirs.setdefault(d, set()).add(b)
645 dirs.setdefault(d, set()).add(b)
646
646
647 @staticmethod
647 @staticmethod
648 def _findsplitdirs(path):
648 def _findsplitdirs(path):
649 # yields (dirname, basename) tuples, walking back to the root. This is
649 # yields (dirname, basename) tuples, walking back to the root. This is
650 # very similar to pathutil.finddirs, except:
650 # very similar to pathutil.finddirs, except:
651 # - produces a (dirname, basename) tuple, not just 'dirname'
651 # - produces a (dirname, basename) tuple, not just 'dirname'
652 # Unlike manifest._splittopdir, this does not suffix `dirname` with a
652 # Unlike manifest._splittopdir, this does not suffix `dirname` with a
653 # slash.
653 # slash.
654 oldpos = len(path)
654 oldpos = len(path)
655 pos = path.rfind(b'/')
655 pos = path.rfind(b'/')
656 while pos != -1:
656 while pos != -1:
657 yield path[:pos], path[pos + 1 : oldpos]
657 yield path[:pos], path[pos + 1 : oldpos]
658 oldpos = pos
658 oldpos = pos
659 pos = path.rfind(b'/', 0, pos)
659 pos = path.rfind(b'/', 0, pos)
660 yield b'', path[:oldpos]
660 yield b'', path[:oldpos]
661
661
662 def get(self, path):
662 def get(self, path):
663 return self._dirs.get(path, set())
663 return self._dirs.get(path, set())
664
664
665
665
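_findsplitdirs walks a path from its deepest directory back to the root, yielding (dirname, basename) pairs with no trailing slash; _dirchildren uses those pairs to record each directory's immediate children. A standalone copy of the split, kept here only to show the traversal order:

    def findsplitdirs(path):
        # Yields (dirname, basename) pairs from the deepest directory up to b''.
        oldpos = len(path)
        pos = path.rfind(b'/')
        while pos != -1:
            yield path[:pos], path[pos + 1 : oldpos]
            oldpos = pos
            pos = path.rfind(b'/', 0, pos)
        yield b'', path[:oldpos]

    assert list(findsplitdirs(b'a/b/c.txt')) == [
        (b'a/b', b'c.txt'),
        (b'a', b'b'),
        (b'', b'a'),
    ]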
666 class includematcher(basematcher):
666 class includematcher(basematcher):
667 def __init__(self, root, kindpats, badfn=None):
667 def __init__(self, root, kindpats, badfn=None):
668 super(includematcher, self).__init__(badfn)
668 super(includematcher, self).__init__(badfn)
669
669 if rustmod is not None:
670 # We need to pass the patterns to Rust because they can contain
671 # patterns from the user interface
672 self._kindpats = kindpats
670 self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
673 self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
671 self._prefix = _prefix(kindpats)
674 self._prefix = _prefix(kindpats)
672 roots, dirs, parents = _rootsdirsandparents(kindpats)
675 roots, dirs, parents = _rootsdirsandparents(kindpats)
673 # roots are directories which are recursively included.
676 # roots are directories which are recursively included.
674 self._roots = set(roots)
677 self._roots = set(roots)
675 # dirs are directories which are non-recursively included.
678 # dirs are directories which are non-recursively included.
676 self._dirs = set(dirs)
679 self._dirs = set(dirs)
677 # parents are directories which are non-recursively included because
680 # parents are directories which are non-recursively included because
678 # they are needed to get to items in _dirs or _roots.
681 # they are needed to get to items in _dirs or _roots.
679 self._parents = parents
682 self._parents = parents
680
683
681 def visitdir(self, dir):
684 def visitdir(self, dir):
682 if self._prefix and dir in self._roots:
685 if self._prefix and dir in self._roots:
683 return b'all'
686 return b'all'
684 return (
687 return (
685 dir in self._roots
688 dir in self._roots
686 or dir in self._dirs
689 or dir in self._dirs
687 or dir in self._parents
690 or dir in self._parents
688 or any(
691 or any(
689 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
692 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
690 )
693 )
691 )
694 )
692
695
693 @propertycache
696 @propertycache
694 def _allparentschildren(self):
697 def _allparentschildren(self):
695 # It may seem odd that we add dirs, roots, and parents, and then
698 # It may seem odd that we add dirs, roots, and parents, and then
696 # restrict to only parents. This is to catch the case of:
699 # restrict to only parents. This is to catch the case of:
697 # dirs = ['foo/bar']
700 # dirs = ['foo/bar']
698 # parents = ['foo']
701 # parents = ['foo']
699 # if we asked for the children of 'foo', but had only added
702 # if we asked for the children of 'foo', but had only added
700 # self._parents, we wouldn't be able to respond ['bar'].
703 # self._parents, we wouldn't be able to respond ['bar'].
701 return _dirchildren(
704 return _dirchildren(
702 itertools.chain(self._dirs, self._roots, self._parents),
705 itertools.chain(self._dirs, self._roots, self._parents),
703 onlyinclude=self._parents,
706 onlyinclude=self._parents,
704 )
707 )
705
708
706 def visitchildrenset(self, dir):
709 def visitchildrenset(self, dir):
707 if self._prefix and dir in self._roots:
710 if self._prefix and dir in self._roots:
708 return b'all'
711 return b'all'
709 # Note: this does *not* include the 'dir in self._parents' case from
712 # Note: this does *not* include the 'dir in self._parents' case from
710 # visitdir, that's handled below.
713 # visitdir, that's handled below.
711 if (
714 if (
712 b'' in self._roots
715 b'' in self._roots
713 or dir in self._roots
716 or dir in self._roots
714 or dir in self._dirs
717 or dir in self._dirs
715 or any(
718 or any(
716 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
719 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
717 )
720 )
718 ):
721 ):
719 return b'this'
722 return b'this'
720
723
721 if dir in self._parents:
724 if dir in self._parents:
722 return self._allparentschildren.get(dir) or set()
725 return self._allparentschildren.get(dir) or set()
723 return set()
726 return set()
724
727
725 @encoding.strmethod
728 @encoding.strmethod
726 def __repr__(self):
729 def __repr__(self):
727 return b'<includematcher includes=%r>' % pycompat.bytestr(self._pats)
730 return b'<includematcher includes=%r>' % pycompat.bytestr(self._pats)
728
731
729
732
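The roots/dirs/parents split is what drives includematcher's pruning: roots are matched recursively, dirs only at their own level, and parents are visited merely to reach the others. A short usage sketch, assuming mercurial.match is importable; the repository root and pattern are examples, and the expected values mirror the 'path:foo/bar' walkthrough in the basematcher docstring above:

    from mercurial import match as matchmod

    im = matchmod.includematcher(b'/repo', [(b'path', b'foo/bar', b'')])
    # 'foo' is only a parent directory; 'foo/bar' is a recursive root.
    assert im.visitchildrenset(b'') == {b'foo'}
    assert im.visitchildrenset(b'foo') == {b'bar'}
    assert im.visitchildrenset(b'foo/bar') == b'all'
    assert im.visitchildrenset(b'baz') == set()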
730 class exactmatcher(basematcher):
733 class exactmatcher(basematcher):
731 r'''Matches the input files exactly. They are interpreted as paths, not
734 r'''Matches the input files exactly. They are interpreted as paths, not
732 patterns (so no kind-prefixes).
735 patterns (so no kind-prefixes).
733
736
734 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
737 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
735 >>> m(b'a.txt')
738 >>> m(b'a.txt')
736 True
739 True
737 >>> m(b'b.txt')
740 >>> m(b'b.txt')
738 False
741 False
739
742
740 Input files that would be matched are exactly those returned by .files()
743 Input files that would be matched are exactly those returned by .files()
741 >>> m.files()
744 >>> m.files()
742 ['a.txt', 're:.*\\.c$']
745 ['a.txt', 're:.*\\.c$']
743
746
744 So pattern 're:.*\.c$' is not considered as a regex, but as a file name
747 So pattern 're:.*\.c$' is not considered as a regex, but as a file name
745 >>> m(b'main.c')
748 >>> m(b'main.c')
746 False
749 False
747 >>> m(br're:.*\.c$')
750 >>> m(br're:.*\.c$')
748 True
751 True
749 '''
752 '''
750
753
751 def __init__(self, files, badfn=None):
754 def __init__(self, files, badfn=None):
752 super(exactmatcher, self).__init__(badfn)
755 super(exactmatcher, self).__init__(badfn)
753
756
754 if isinstance(files, list):
757 if isinstance(files, list):
755 self._files = files
758 self._files = files
756 else:
759 else:
757 self._files = list(files)
760 self._files = list(files)
758
761
759 matchfn = basematcher.exact
762 matchfn = basematcher.exact
760
763
761 @propertycache
764 @propertycache
762 def _dirs(self):
765 def _dirs(self):
763 return set(pathutil.dirs(self._fileset))
766 return set(pathutil.dirs(self._fileset))
764
767
765 def visitdir(self, dir):
768 def visitdir(self, dir):
766 return dir in self._dirs
769 return dir in self._dirs
767
770
768 def visitchildrenset(self, dir):
771 def visitchildrenset(self, dir):
769 if not self._fileset or dir not in self._dirs:
772 if not self._fileset or dir not in self._dirs:
770 return set()
773 return set()
771
774
772 candidates = self._fileset | self._dirs - {b''}
775 candidates = self._fileset | self._dirs - {b''}
773 if dir != b'':
776 if dir != b'':
774 d = dir + b'/'
777 d = dir + b'/'
775 candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
778 candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
776 # self._dirs includes all of the directories, recursively, so if
779 # self._dirs includes all of the directories, recursively, so if
777 # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
780 # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
778 # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
781 # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
779 # '/' in it, indicating it's for a subdir-of-a-subdir; the
782 # '/' in it, indicating it's for a subdir-of-a-subdir; the
780 # immediate subdir will be in there without a slash.
783 # immediate subdir will be in there without a slash.
781 ret = {c for c in candidates if b'/' not in c}
784 ret = {c for c in candidates if b'/' not in c}
782 # We really do not expect ret to be empty, since that would imply that
785 # We really do not expect ret to be empty, since that would imply that
783 # there's something in _dirs that didn't have a file in _fileset.
786 # there's something in _dirs that didn't have a file in _fileset.
784 assert ret
787 assert ret
785 return ret
788 return ret
786
789
787 def isexact(self):
790 def isexact(self):
788 return True
791 return True
789
792
790 @encoding.strmethod
793 @encoding.strmethod
791 def __repr__(self):
794 def __repr__(self):
792 return b'<exactmatcher files=%r>' % self._files
795 return b'<exactmatcher files=%r>' % self._files
793
796
794
797
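Because exactmatcher knows the full file list up front, visitchildrenset can name exactly which children are worth visiting under any directory. A short usage sketch, again assuming mercurial.match is importable; the file names are arbitrary:

    from mercurial import match as matchmod

    m = matchmod.exactmatcher([b'a.txt', b'dir/b.txt'])
    assert m(b'a.txt') and not m(b'c.txt')
    # Immediate children worth visiting, per directory:
    assert m.visitchildrenset(b'') == {b'a.txt', b'dir'}
    assert m.visitchildrenset(b'dir') == {b'b.txt'}
    assert m.visitchildrenset(b'other') == set()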
795 class differencematcher(basematcher):
798 class differencematcher(basematcher):
796 '''Composes two matchers by matching if the first matches and the second
799 '''Composes two matchers by matching if the first matches and the second
797 does not.
800 does not.
798
801
799 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
802 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
800 '''
803 '''
801
804
802 def __init__(self, m1, m2):
805 def __init__(self, m1, m2):
803 super(differencematcher, self).__init__()
806 super(differencematcher, self).__init__()
804 self._m1 = m1
807 self._m1 = m1
805 self._m2 = m2
808 self._m2 = m2
806 self.bad = m1.bad
809 self.bad = m1.bad
807 self.traversedir = m1.traversedir
810 self.traversedir = m1.traversedir
808
811
809 def matchfn(self, f):
812 def matchfn(self, f):
810 return self._m1(f) and not self._m2(f)
813 return self._m1(f) and not self._m2(f)
811
814
812 @propertycache
815 @propertycache
813 def _files(self):
816 def _files(self):
814 if self.isexact():
817 if self.isexact():
815 return [f for f in self._m1.files() if self(f)]
818 return [f for f in self._m1.files() if self(f)]
816 # If m1 is not an exact matcher, we can't easily figure out the set of
819 # If m1 is not an exact matcher, we can't easily figure out the set of
817 # files, because its files() are not always files. For example, if
820 # files, because its files() are not always files. For example, if
818 # m1 is "path:dir" and m2 is "rootfileins:.", we don't
821 # m1 is "path:dir" and m2 is "rootfileins:.", we don't
819 # want to remove "dir" from the set even though it would match m2,
822 # want to remove "dir" from the set even though it would match m2,
820 # because the "dir" in m1 may not be a file.
823 # because the "dir" in m1 may not be a file.
821 return self._m1.files()
824 return self._m1.files()
822
825
823 def visitdir(self, dir):
826 def visitdir(self, dir):
824 if self._m2.visitdir(dir) == b'all':
827 if self._m2.visitdir(dir) == b'all':
825 return False
828 return False
826 elif not self._m2.visitdir(dir):
829 elif not self._m2.visitdir(dir):
827 # m2 does not match dir, we can return 'all' here if possible
830 # m2 does not match dir, we can return 'all' here if possible
828 return self._m1.visitdir(dir)
831 return self._m1.visitdir(dir)
829 return bool(self._m1.visitdir(dir))
832 return bool(self._m1.visitdir(dir))
830
833
831 def visitchildrenset(self, dir):
834 def visitchildrenset(self, dir):
832 m2_set = self._m2.visitchildrenset(dir)
835 m2_set = self._m2.visitchildrenset(dir)
833 if m2_set == b'all':
836 if m2_set == b'all':
834 return set()
837 return set()
835 m1_set = self._m1.visitchildrenset(dir)
838 m1_set = self._m1.visitchildrenset(dir)
836 # Possible values for m1: 'all', 'this', set(...), set()
839 # Possible values for m1: 'all', 'this', set(...), set()
837 # Possible values for m2: 'this', set(...), set()
840 # Possible values for m2: 'this', set(...), set()
838 # If m2 has nothing under here that we care about, return m1, even if
841 # If m2 has nothing under here that we care about, return m1, even if
839 # it's 'all'. This is a change in behavior from visitdir, which would
842 # it's 'all'. This is a change in behavior from visitdir, which would
840 # return True, not 'all', for some reason.
843 # return True, not 'all', for some reason.
841 if not m2_set:
844 if not m2_set:
842 return m1_set
845 return m1_set
843 if m1_set in [b'all', b'this']:
846 if m1_set in [b'all', b'this']:
844 # Never return 'all' here if m2_set is any kind of non-empty (either
847 # Never return 'all' here if m2_set is any kind of non-empty (either
845 # 'this' or set(foo)), since m2 might return set() for a
848 # 'this' or set(foo)), since m2 might return set() for a
846 # subdirectory.
849 # subdirectory.
847 return b'this'
850 return b'this'
848 # Possible values for m1: set(...), set()
851 # Possible values for m1: set(...), set()
849 # Possible values for m2: 'this', set(...)
852 # Possible values for m2: 'this', set(...)
850 # We ignore m2's set results. They're possibly incorrect:
853 # We ignore m2's set results. They're possibly incorrect:
851 # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
854 # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
852 # m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
855 # m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
853 # return set(), which is *not* correct, we still need to visit 'dir'!
856 # return set(), which is *not* correct, we still need to visit 'dir'!
854 return m1_set
857 return m1_set
855
858
856 def isexact(self):
859 def isexact(self):
857 return self._m1.isexact()
860 return self._m1.isexact()
858
861
859 @encoding.strmethod
862 @encoding.strmethod
860 def __repr__(self):
863 def __repr__(self):
861 return b'<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
864 return b'<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
862
865
863
866
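differencematcher is the composition used for exclude (-X) patterns: a file matches only if the first matcher accepts it and the second does not, and traversal answers stay conservative whenever the exclude side might still match something below. A brief sketch composing two exact matchers, assuming mercurial.match is importable; the paths are examples only:

    from mercurial import match as matchmod

    included = matchmod.exactmatcher([b'src/main.c', b'build/out.c'])
    excluded = matchmod.exactmatcher([b'build/out.c'])
    dm = matchmod.differencematcher(included, excluded)
    assert dm(b'src/main.c')
    assert not dm(b'build/out.c')      # accepted by m1 but removed by m2
    # m2 matches nothing under 'src', so m1's child set passes through:
    assert dm.visitchildrenset(b'src') == {b'main.c'}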
864 def intersectmatchers(m1, m2):
867 def intersectmatchers(m1, m2):
865 '''Composes two matchers by matching if both of them match.
868 '''Composes two matchers by matching if both of them match.
866
869
867 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
870 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
868 '''
871 '''
869 if m1 is None or m2 is None:
872 if m1 is None or m2 is None:
870 return m1 or m2
873 return m1 or m2
871 if m1.always():
874 if m1.always():
872 m = copy.copy(m2)
875 m = copy.copy(m2)
873 # TODO: Consider encapsulating these things in a class so there's only
876 # TODO: Consider encapsulating these things in a class so there's only
874 # one thing to copy from m1.
877 # one thing to copy from m1.
875 m.bad = m1.bad
878 m.bad = m1.bad
876 m.traversedir = m1.traversedir
879 m.traversedir = m1.traversedir
877 return m
880 return m
878 if m2.always():
881 if m2.always():
879 m = copy.copy(m1)
882 m = copy.copy(m1)
880 return m
883 return m
881 return intersectionmatcher(m1, m2)
884 return intersectionmatcher(m1, m2)
882
885
883
886
884 class intersectionmatcher(basematcher):
887 class intersectionmatcher(basematcher):
885 def __init__(self, m1, m2):
888 def __init__(self, m1, m2):
886 super(intersectionmatcher, self).__init__()
889 super(intersectionmatcher, self).__init__()
887 self._m1 = m1
890 self._m1 = m1
888 self._m2 = m2
891 self._m2 = m2
889 self.bad = m1.bad
892 self.bad = m1.bad
890 self.traversedir = m1.traversedir
893 self.traversedir = m1.traversedir
891
894
892 @propertycache
895 @propertycache
893 def _files(self):
896 def _files(self):
894 if self.isexact():
897 if self.isexact():
895 m1, m2 = self._m1, self._m2
898 m1, m2 = self._m1, self._m2
896 if not m1.isexact():
899 if not m1.isexact():
897 m1, m2 = m2, m1
900 m1, m2 = m2, m1
898 return [f for f in m1.files() if m2(f)]
901 return [f for f in m1.files() if m2(f)]
899 # If neither m1 nor m2 is an exact matcher, we can't easily intersect
902 # If neither m1 nor m2 is an exact matcher, we can't easily intersect
900 # the set of files, because their files() are not always files. For
903 # the set of files, because their files() are not always files. For
901 # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
904 # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
902 # "path:dir2", we don't want to remove "dir2" from the set.
905 # "path:dir2", we don't want to remove "dir2" from the set.
903 return self._m1.files() + self._m2.files()
906 return self._m1.files() + self._m2.files()
904
907
905 def matchfn(self, f):
908 def matchfn(self, f):
906 return self._m1(f) and self._m2(f)
909 return self._m1(f) and self._m2(f)
907
910
908 def visitdir(self, dir):
911 def visitdir(self, dir):
909 visit1 = self._m1.visitdir(dir)
912 visit1 = self._m1.visitdir(dir)
910 if visit1 == b'all':
913 if visit1 == b'all':
911 return self._m2.visitdir(dir)
914 return self._m2.visitdir(dir)
912 # bool() because visit1=True + visit2='all' should not be 'all'
915 # bool() because visit1=True + visit2='all' should not be 'all'
913 return bool(visit1 and self._m2.visitdir(dir))
916 return bool(visit1 and self._m2.visitdir(dir))
914
917
915 def visitchildrenset(self, dir):
918 def visitchildrenset(self, dir):
916 m1_set = self._m1.visitchildrenset(dir)
919 m1_set = self._m1.visitchildrenset(dir)
917 if not m1_set:
920 if not m1_set:
918 return set()
921 return set()
919 m2_set = self._m2.visitchildrenset(dir)
922 m2_set = self._m2.visitchildrenset(dir)
920 if not m2_set:
923 if not m2_set:
921 return set()
924 return set()
922
925
923 if m1_set == b'all':
926 if m1_set == b'all':
924 return m2_set
927 return m2_set
925 elif m2_set == b'all':
928 elif m2_set == b'all':
926 return m1_set
929 return m1_set
927
930
928 if m1_set == b'this' or m2_set == b'this':
931 if m1_set == b'this' or m2_set == b'this':
929 return b'this'
932 return b'this'
930
933
931 assert isinstance(m1_set, set) and isinstance(m2_set, set)
934 assert isinstance(m1_set, set) and isinstance(m2_set, set)
932 return m1_set.intersection(m2_set)
935 return m1_set.intersection(m2_set)
933
936
934 def always(self):
937 def always(self):
935 return self._m1.always() and self._m2.always()
938 return self._m1.always() and self._m2.always()
936
939
937 def isexact(self):
940 def isexact(self):
938 return self._m1.isexact() or self._m2.isexact()
941 return self._m1.isexact() or self._m2.isexact()
939
942
940 @encoding.strmethod
943 @encoding.strmethod
941 def __repr__(self):
944 def __repr__(self):
942 return b'<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
945 return b'<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
943
946
944
947
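intersectmatchers/intersectionmatcher are the include (-I) counterpart: both sides must accept a file, and child sets are intersected so the walk is pruned by whichever matcher is more restrictive. A short sketch, again assuming mercurial.match is importable; the file names are arbitrary:

    from mercurial import match as matchmod

    m1 = matchmod.exactmatcher([b'docs/a.txt', b'src/b.py'])
    m2 = matchmod.exactmatcher([b'src/b.py', b'src/c.py'])
    im = matchmod.intersectmatchers(m1, m2)
    assert im(b'src/b.py')
    assert not im(b'docs/a.txt')
    # Only 'src' survives the intersection of the two child sets:
    assert im.visitchildrenset(b'') == {b'src'}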
945 class subdirmatcher(basematcher):
948 class subdirmatcher(basematcher):
946 """Adapt a matcher to work on a subdirectory only.
949 """Adapt a matcher to work on a subdirectory only.
947
950
948 The paths are remapped to remove/insert the path as needed:
951 The paths are remapped to remove/insert the path as needed:
949
952
950 >>> from . import pycompat
953 >>> from . import pycompat
951 >>> m1 = match(util.localpath(b'/root'), b'', [b'a.txt', b'sub/b.txt'], auditor=lambda name: None)
954 >>> m1 = match(util.localpath(b'/root'), b'', [b'a.txt', b'sub/b.txt'], auditor=lambda name: None)
952 >>> m2 = subdirmatcher(b'sub', m1)
955 >>> m2 = subdirmatcher(b'sub', m1)
953 >>> m2(b'a.txt')
956 >>> m2(b'a.txt')
954 False
957 False
955 >>> m2(b'b.txt')
958 >>> m2(b'b.txt')
956 True
959 True
957 >>> m2.matchfn(b'a.txt')
960 >>> m2.matchfn(b'a.txt')
958 False
961 False
959 >>> m2.matchfn(b'b.txt')
962 >>> m2.matchfn(b'b.txt')
960 True
963 True
961 >>> m2.files()
964 >>> m2.files()
962 ['b.txt']
965 ['b.txt']
963 >>> m2.exact(b'b.txt')
966 >>> m2.exact(b'b.txt')
964 True
967 True
965 >>> def bad(f, msg):
968 >>> def bad(f, msg):
966 ... print(pycompat.sysstr(b"%s: %s" % (f, msg)))
969 ... print(pycompat.sysstr(b"%s: %s" % (f, msg)))
967 >>> m1.bad = bad
970 >>> m1.bad = bad
968 >>> m2.bad(b'x.txt', b'No such file')
971 >>> m2.bad(b'x.txt', b'No such file')
969 sub/x.txt: No such file
972 sub/x.txt: No such file
970 """
973 """
971
974
972 def __init__(self, path, matcher):
975 def __init__(self, path, matcher):
973 super(subdirmatcher, self).__init__()
976 super(subdirmatcher, self).__init__()
974 self._path = path
977 self._path = path
975 self._matcher = matcher
978 self._matcher = matcher
976 self._always = matcher.always()
979 self._always = matcher.always()
977
980
978 self._files = [
981 self._files = [
979 f[len(path) + 1 :]
982 f[len(path) + 1 :]
980 for f in matcher._files
983 for f in matcher._files
981 if f.startswith(path + b"/")
984 if f.startswith(path + b"/")
982 ]
985 ]
983
986
984 # If the parent repo had a path to this subrepo and the matcher is
987 # If the parent repo had a path to this subrepo and the matcher is
985 # a prefix matcher, this submatcher always matches.
988 # a prefix matcher, this submatcher always matches.
986 if matcher.prefix():
989 if matcher.prefix():
987 self._always = any(f == path for f in matcher._files)
990 self._always = any(f == path for f in matcher._files)
988
991
989 def bad(self, f, msg):
992 def bad(self, f, msg):
990 self._matcher.bad(self._path + b"/" + f, msg)
993 self._matcher.bad(self._path + b"/" + f, msg)
991
994
992 def matchfn(self, f):
995 def matchfn(self, f):
993 # Some information is lost in the superclass's constructor, so we
996 # Some information is lost in the superclass's constructor, so we
994 # can not accurately create the matching function for the subdirectory
997 # can not accurately create the matching function for the subdirectory
995 # from the inputs. Instead, we override matchfn() and visitdir() to
998 # from the inputs. Instead, we override matchfn() and visitdir() to
996 # call the original matcher with the subdirectory path prepended.
999 # call the original matcher with the subdirectory path prepended.
997 return self._matcher.matchfn(self._path + b"/" + f)
1000 return self._matcher.matchfn(self._path + b"/" + f)
998
1001
999 def visitdir(self, dir):
1002 def visitdir(self, dir):
1000 if dir == b'':
1003 if dir == b'':
1001 dir = self._path
1004 dir = self._path
1002 else:
1005 else:
1003 dir = self._path + b"/" + dir
1006 dir = self._path + b"/" + dir
1004 return self._matcher.visitdir(dir)
1007 return self._matcher.visitdir(dir)
1005
1008
1006 def visitchildrenset(self, dir):
1009 def visitchildrenset(self, dir):
1007 if dir == b'':
1010 if dir == b'':
1008 dir = self._path
1011 dir = self._path
1009 else:
1012 else:
1010 dir = self._path + b"/" + dir
1013 dir = self._path + b"/" + dir
1011 return self._matcher.visitchildrenset(dir)
1014 return self._matcher.visitchildrenset(dir)
1012
1015
1013 def always(self):
1016 def always(self):
1014 return self._always
1017 return self._always
1015
1018
1016 def prefix(self):
1019 def prefix(self):
1017 return self._matcher.prefix() and not self._always
1020 return self._matcher.prefix() and not self._always
1018
1021
1019 @encoding.strmethod
1022 @encoding.strmethod
1020 def __repr__(self):
1023 def __repr__(self):
1021 return b'<subdirmatcher path=%r, matcher=%r>' % (
1024 return b'<subdirmatcher path=%r, matcher=%r>' % (
1022 self._path,
1025 self._path,
1023 self._matcher,
1026 self._matcher,
1024 )
1027 )
1025
1028
1026
1029
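The file-list narrowing done in subdirmatcher.__init__ above can also be seen on its own; narrow_files below is a hypothetical stand-in for that list comprehension, not a function of this module:

def narrow_files(path, files):
    # Keep only entries under `path` and strip the `path + b'/'` prefix,
    # which is what subdirmatcher does with the parent matcher's _files.
    prefix = path + b"/"
    return [f[len(prefix):] for f in files if f.startswith(prefix)]

assert narrow_files(b'sub', [b'a.txt', b'sub/b.txt']) == [b'b.txt']
assert narrow_files(b'sub', [b'subdir/c.txt']) == []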
1027 class prefixdirmatcher(basematcher):
1030 class prefixdirmatcher(basematcher):
1028 """Adapt a matcher to work on a parent directory.
1031 """Adapt a matcher to work on a parent directory.
1029
1032
1030 The matcher's non-matching-attributes (bad, traversedir) are ignored.
1033 The matcher's non-matching-attributes (bad, traversedir) are ignored.
1031
1034
1032 The prefix path should usually be the relative path from the root of
1035 The prefix path should usually be the relative path from the root of
1033 this matcher to the root of the wrapped matcher.
1036 this matcher to the root of the wrapped matcher.
1034
1037
1035 >>> m1 = match(util.localpath(b'/root/d/e'), b'f', [b'../a.txt', b'b.txt'], auditor=lambda name: None)
1038 >>> m1 = match(util.localpath(b'/root/d/e'), b'f', [b'../a.txt', b'b.txt'], auditor=lambda name: None)
1036 >>> m2 = prefixdirmatcher(b'd/e', m1)
1039 >>> m2 = prefixdirmatcher(b'd/e', m1)
1037 >>> m2(b'a.txt')
1040 >>> m2(b'a.txt')
1038 False
1041 False
1039 >>> m2(b'd/e/a.txt')
1042 >>> m2(b'd/e/a.txt')
1040 True
1043 True
1041 >>> m2(b'd/e/b.txt')
1044 >>> m2(b'd/e/b.txt')
1042 False
1045 False
1043 >>> m2.files()
1046 >>> m2.files()
1044 ['d/e/a.txt', 'd/e/f/b.txt']
1047 ['d/e/a.txt', 'd/e/f/b.txt']
1045 >>> m2.exact(b'd/e/a.txt')
1048 >>> m2.exact(b'd/e/a.txt')
1046 True
1049 True
1047 >>> m2.visitdir(b'd')
1050 >>> m2.visitdir(b'd')
1048 True
1051 True
1049 >>> m2.visitdir(b'd/e')
1052 >>> m2.visitdir(b'd/e')
1050 True
1053 True
1051 >>> m2.visitdir(b'd/e/f')
1054 >>> m2.visitdir(b'd/e/f')
1052 True
1055 True
1053 >>> m2.visitdir(b'd/e/g')
1056 >>> m2.visitdir(b'd/e/g')
1054 False
1057 False
1055 >>> m2.visitdir(b'd/ef')
1058 >>> m2.visitdir(b'd/ef')
1056 False
1059 False
1057 """
1060 """
1058
1061
1059 def __init__(self, path, matcher, badfn=None):
1062 def __init__(self, path, matcher, badfn=None):
1060 super(prefixdirmatcher, self).__init__(badfn)
1063 super(prefixdirmatcher, self).__init__(badfn)
1061 if not path:
1064 if not path:
1062 raise error.ProgrammingError(b'prefix path must not be empty')
1065 raise error.ProgrammingError(b'prefix path must not be empty')
1063 self._path = path
1066 self._path = path
1064 self._pathprefix = path + b'/'
1067 self._pathprefix = path + b'/'
1065 self._matcher = matcher
1068 self._matcher = matcher
1066
1069
1067 @propertycache
1070 @propertycache
1068 def _files(self):
1071 def _files(self):
1069 return [self._pathprefix + f for f in self._matcher._files]
1072 return [self._pathprefix + f for f in self._matcher._files]
1070
1073
1071 def matchfn(self, f):
1074 def matchfn(self, f):
1072 if not f.startswith(self._pathprefix):
1075 if not f.startswith(self._pathprefix):
1073 return False
1076 return False
1074 return self._matcher.matchfn(f[len(self._pathprefix) :])
1077 return self._matcher.matchfn(f[len(self._pathprefix) :])
1075
1078
1076 @propertycache
1079 @propertycache
1077 def _pathdirs(self):
1080 def _pathdirs(self):
1078 return set(pathutil.finddirs(self._path))
1081 return set(pathutil.finddirs(self._path))
1079
1082
1080 def visitdir(self, dir):
1083 def visitdir(self, dir):
1081 if dir == self._path:
1084 if dir == self._path:
1082 return self._matcher.visitdir(b'')
1085 return self._matcher.visitdir(b'')
1083 if dir.startswith(self._pathprefix):
1086 if dir.startswith(self._pathprefix):
1084 return self._matcher.visitdir(dir[len(self._pathprefix) :])
1087 return self._matcher.visitdir(dir[len(self._pathprefix) :])
1085 return dir in self._pathdirs
1088 return dir in self._pathdirs
1086
1089
1087 def visitchildrenset(self, dir):
1090 def visitchildrenset(self, dir):
1088 if dir == self._path:
1091 if dir == self._path:
1089 return self._matcher.visitchildrenset(b'')
1092 return self._matcher.visitchildrenset(b'')
1090 if dir.startswith(self._pathprefix):
1093 if dir.startswith(self._pathprefix):
1091 return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
1094 return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
1092 if dir in self._pathdirs:
1095 if dir in self._pathdirs:
1093 return b'this'
1096 return b'this'
1094 return set()
1097 return set()
1095
1098
1096 def isexact(self):
1099 def isexact(self):
1097 return self._matcher.isexact()
1100 return self._matcher.isexact()
1098
1101
1099 def prefix(self):
1102 def prefix(self):
1100 return self._matcher.prefix()
1103 return self._matcher.prefix()
1101
1104
1102 @encoding.strmethod
1105 @encoding.strmethod
1103 def __repr__(self):
1106 def __repr__(self):
1104 return b'<prefixdirmatcher path=%r, matcher=%r>' % (
1107 return b'<prefixdirmatcher path=%r, matcher=%r>' % (
1105 pycompat.bytestr(self._path),
1108 pycompat.bytestr(self._path),
1106 self._matcher,
1109 self._matcher,
1107 )
1110 )
1108
1111
1109
1112
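The directory-visiting rule of prefixdirmatcher.visitdir above, restated as a standalone sketch (prefixed_visitdir and its arguments are illustrative names only):

def prefixed_visitdir(prefix, pathdirs, inner_visitdir, dir):
    # Directories at or below the prefix are delegated to the wrapped
    # matcher with the prefix stripped; ancestors of the prefix must still
    # be traversed in order to reach it; everything else is skipped.
    if dir == prefix:
        return inner_visitdir(b'')
    if dir.startswith(prefix + b'/'):
        return inner_visitdir(dir[len(prefix) + 1:])
    return dir in pathdirs

ancestors = {b'', b'd'}         # equivalent of pathutil.finddirs(b'd/e')
always = lambda d: True         # wrapped matcher that visits everything
assert prefixed_visitdir(b'd/e', ancestors, always, b'd') is True
assert prefixed_visitdir(b'd/e', ancestors, always, b'd/e/f') is True
assert prefixed_visitdir(b'd/e', ancestors, always, b'd/ef') is False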
1110 class unionmatcher(basematcher):
1113 class unionmatcher(basematcher):
1111 """A matcher that is the union of several matchers.
1114 """A matcher that is the union of several matchers.
1112
1115
1113 The non-matching-attributes (bad, traversedir) are taken from the first
1116 The non-matching-attributes (bad, traversedir) are taken from the first
1114 matcher.
1117 matcher.
1115 """
1118 """
1116
1119
1117 def __init__(self, matchers):
1120 def __init__(self, matchers):
1118 m1 = matchers[0]
1121 m1 = matchers[0]
1119 super(unionmatcher, self).__init__()
1122 super(unionmatcher, self).__init__()
1120 self.traversedir = m1.traversedir
1123 self.traversedir = m1.traversedir
1121 self._matchers = matchers
1124 self._matchers = matchers
1122
1125
1123 def matchfn(self, f):
1126 def matchfn(self, f):
1124 for match in self._matchers:
1127 for match in self._matchers:
1125 if match(f):
1128 if match(f):
1126 return True
1129 return True
1127 return False
1130 return False
1128
1131
1129 def visitdir(self, dir):
1132 def visitdir(self, dir):
1130 r = False
1133 r = False
1131 for m in self._matchers:
1134 for m in self._matchers:
1132 v = m.visitdir(dir)
1135 v = m.visitdir(dir)
1133 if v == b'all':
1136 if v == b'all':
1134 return v
1137 return v
1135 r |= v
1138 r |= v
1136 return r
1139 return r
1137
1140
1138 def visitchildrenset(self, dir):
1141 def visitchildrenset(self, dir):
1139 r = set()
1142 r = set()
1140 this = False
1143 this = False
1141 for m in self._matchers:
1144 for m in self._matchers:
1142 v = m.visitchildrenset(dir)
1145 v = m.visitchildrenset(dir)
1143 if not v:
1146 if not v:
1144 continue
1147 continue
1145 if v == b'all':
1148 if v == b'all':
1146 return v
1149 return v
1147 if this or v == b'this':
1150 if this or v == b'this':
1148 this = True
1151 this = True
1149 # don't break, we might have an 'all' in here.
1152 # don't break, we might have an 'all' in here.
1150 continue
1153 continue
1151 assert isinstance(v, set)
1154 assert isinstance(v, set)
1152 r = r.union(v)
1155 r = r.union(v)
1153 if this:
1156 if this:
1154 return b'this'
1157 return b'this'
1155 return r
1158 return r
1156
1159
1157 @encoding.strmethod
1160 @encoding.strmethod
1158 def __repr__(self):
1161 def __repr__(self):
1159 return b'<unionmatcher matchers=%r>' % self._matchers
1162 return b'<unionmatcher matchers=%r>' % self._matchers
1160
1163
1161
1164
1162 def patkind(pattern, default=None):
1165 def patkind(pattern, default=None):
1163 r'''If pattern is 'kind:pat' with a known kind, return kind.
1166 r'''If pattern is 'kind:pat' with a known kind, return kind.
1164
1167
1165 >>> patkind(br're:.*\.c$')
1168 >>> patkind(br're:.*\.c$')
1166 're'
1169 're'
1167 >>> patkind(b'glob:*.c')
1170 >>> patkind(b'glob:*.c')
1168 'glob'
1171 'glob'
1169 >>> patkind(b'relpath:test.py')
1172 >>> patkind(b'relpath:test.py')
1170 'relpath'
1173 'relpath'
1171 >>> patkind(b'main.py')
1174 >>> patkind(b'main.py')
1172 >>> patkind(b'main.py', default=b're')
1175 >>> patkind(b'main.py', default=b're')
1173 're'
1176 're'
1174 '''
1177 '''
1175 return _patsplit(pattern, default)[0]
1178 return _patsplit(pattern, default)[0]
1176
1179
1177
1180
1178 def _patsplit(pattern, default):
1181 def _patsplit(pattern, default):
1179 """Split a string into the optional pattern kind prefix and the actual
1182 """Split a string into the optional pattern kind prefix and the actual
1180 pattern."""
1183 pattern."""
1181 if b':' in pattern:
1184 if b':' in pattern:
1182 kind, pat = pattern.split(b':', 1)
1185 kind, pat = pattern.split(b':', 1)
1183 if kind in allpatternkinds:
1186 if kind in allpatternkinds:
1184 return kind, pat
1187 return kind, pat
1185 return default, pattern
1188 return default, pattern
1186
1189
1187
1190
1188 def _globre(pat):
1191 def _globre(pat):
1189 r'''Convert an extended glob string to a regexp string.
1192 r'''Convert an extended glob string to a regexp string.
1190
1193
1191 >>> from . import pycompat
1194 >>> from . import pycompat
1192 >>> def bprint(s):
1195 >>> def bprint(s):
1193 ... print(pycompat.sysstr(s))
1196 ... print(pycompat.sysstr(s))
1194 >>> bprint(_globre(br'?'))
1197 >>> bprint(_globre(br'?'))
1195 .
1198 .
1196 >>> bprint(_globre(br'*'))
1199 >>> bprint(_globre(br'*'))
1197 [^/]*
1200 [^/]*
1198 >>> bprint(_globre(br'**'))
1201 >>> bprint(_globre(br'**'))
1199 .*
1202 .*
1200 >>> bprint(_globre(br'**/a'))
1203 >>> bprint(_globre(br'**/a'))
1201 (?:.*/)?a
1204 (?:.*/)?a
1202 >>> bprint(_globre(br'a/**/b'))
1205 >>> bprint(_globre(br'a/**/b'))
1203 a/(?:.*/)?b
1206 a/(?:.*/)?b
1204 >>> bprint(_globre(br'[a*?!^][^b][!c]'))
1207 >>> bprint(_globre(br'[a*?!^][^b][!c]'))
1205 [a*?!^][\^b][^c]
1208 [a*?!^][\^b][^c]
1206 >>> bprint(_globre(br'{a,b}'))
1209 >>> bprint(_globre(br'{a,b}'))
1207 (?:a|b)
1210 (?:a|b)
1208 >>> bprint(_globre(br'.\*\?'))
1211 >>> bprint(_globre(br'.\*\?'))
1209 \.\*\?
1212 \.\*\?
1210 '''
1213 '''
1211 i, n = 0, len(pat)
1214 i, n = 0, len(pat)
1212 res = b''
1215 res = b''
1213 group = 0
1216 group = 0
1214 escape = util.stringutil.regexbytesescapemap.get
1217 escape = util.stringutil.regexbytesescapemap.get
1215
1218
1216 def peek():
1219 def peek():
1217 return i < n and pat[i : i + 1]
1220 return i < n and pat[i : i + 1]
1218
1221
1219 while i < n:
1222 while i < n:
1220 c = pat[i : i + 1]
1223 c = pat[i : i + 1]
1221 i += 1
1224 i += 1
1222 if c not in b'*?[{},\\':
1225 if c not in b'*?[{},\\':
1223 res += escape(c, c)
1226 res += escape(c, c)
1224 elif c == b'*':
1227 elif c == b'*':
1225 if peek() == b'*':
1228 if peek() == b'*':
1226 i += 1
1229 i += 1
1227 if peek() == b'/':
1230 if peek() == b'/':
1228 i += 1
1231 i += 1
1229 res += b'(?:.*/)?'
1232 res += b'(?:.*/)?'
1230 else:
1233 else:
1231 res += b'.*'
1234 res += b'.*'
1232 else:
1235 else:
1233 res += b'[^/]*'
1236 res += b'[^/]*'
1234 elif c == b'?':
1237 elif c == b'?':
1235 res += b'.'
1238 res += b'.'
1236 elif c == b'[':
1239 elif c == b'[':
1237 j = i
1240 j = i
1238 if j < n and pat[j : j + 1] in b'!]':
1241 if j < n and pat[j : j + 1] in b'!]':
1239 j += 1
1242 j += 1
1240 while j < n and pat[j : j + 1] != b']':
1243 while j < n and pat[j : j + 1] != b']':
1241 j += 1
1244 j += 1
1242 if j >= n:
1245 if j >= n:
1243 res += b'\\['
1246 res += b'\\['
1244 else:
1247 else:
1245 stuff = pat[i:j].replace(b'\\', b'\\\\')
1248 stuff = pat[i:j].replace(b'\\', b'\\\\')
1246 i = j + 1
1249 i = j + 1
1247 if stuff[0:1] == b'!':
1250 if stuff[0:1] == b'!':
1248 stuff = b'^' + stuff[1:]
1251 stuff = b'^' + stuff[1:]
1249 elif stuff[0:1] == b'^':
1252 elif stuff[0:1] == b'^':
1250 stuff = b'\\' + stuff
1253 stuff = b'\\' + stuff
1251 res = b'%s[%s]' % (res, stuff)
1254 res = b'%s[%s]' % (res, stuff)
1252 elif c == b'{':
1255 elif c == b'{':
1253 group += 1
1256 group += 1
1254 res += b'(?:'
1257 res += b'(?:'
1255 elif c == b'}' and group:
1258 elif c == b'}' and group:
1256 res += b')'
1259 res += b')'
1257 group -= 1
1260 group -= 1
1258 elif c == b',' and group:
1261 elif c == b',' and group:
1259 res += b'|'
1262 res += b'|'
1260 elif c == b'\\':
1263 elif c == b'\\':
1261 p = peek()
1264 p = peek()
1262 if p:
1265 if p:
1263 i += 1
1266 i += 1
1264 res += escape(p, p)
1267 res += escape(p, p)
1265 else:
1268 else:
1266 res += escape(c, c)
1269 res += escape(c, c)
1267 else:
1270 else:
1268 res += escape(c, c)
1271 res += escape(c, c)
1269 return res
1272 return res
1270
1273
1271
1274
1272 def _regex(kind, pat, globsuffix):
1275 def _regex(kind, pat, globsuffix):
1273 '''Convert a (normalized) pattern of any kind into a
1276 '''Convert a (normalized) pattern of any kind into a
1274 regular expression.
1277 regular expression.
1275 globsuffix is appended to the regexp of globs.'''
1278 globsuffix is appended to the regexp of globs.'''
1276 if not pat and kind in (b'glob', b'relpath'):
1279 if not pat and kind in (b'glob', b'relpath'):
1277 return b''
1280 return b''
1278 if kind == b're':
1281 if kind == b're':
1279 return pat
1282 return pat
1280 if kind in (b'path', b'relpath'):
1283 if kind in (b'path', b'relpath'):
1281 if pat == b'.':
1284 if pat == b'.':
1282 return b''
1285 return b''
1283 return util.stringutil.reescape(pat) + b'(?:/|$)'
1286 return util.stringutil.reescape(pat) + b'(?:/|$)'
1284 if kind == b'rootfilesin':
1287 if kind == b'rootfilesin':
1285 if pat == b'.':
1288 if pat == b'.':
1286 escaped = b''
1289 escaped = b''
1287 else:
1290 else:
1288 # Pattern is a directory name.
1291 # Pattern is a directory name.
1289 escaped = util.stringutil.reescape(pat) + b'/'
1292 escaped = util.stringutil.reescape(pat) + b'/'
1290 # Anything after the pattern must be a non-directory.
1293 # Anything after the pattern must be a non-directory.
1291 return escaped + b'[^/]+$'
1294 return escaped + b'[^/]+$'
1292 if kind == b'relglob':
1295 if kind == b'relglob':
1293 globre = _globre(pat)
1296 globre = _globre(pat)
1294 if globre.startswith(b'[^/]*'):
1297 if globre.startswith(b'[^/]*'):
1295 # When pat has the form *XYZ (common), make the returned regex more
1298 # When pat has the form *XYZ (common), make the returned regex more
1296 # legible by returning the regex for **XYZ instead of **/*XYZ.
1299 # legible by returning the regex for **XYZ instead of **/*XYZ.
1297 return b'.*' + globre[len(b'[^/]*') :] + globsuffix
1300 return b'.*' + globre[len(b'[^/]*') :] + globsuffix
1298 return b'(?:|.*/)' + globre + globsuffix
1301 return b'(?:|.*/)' + globre + globsuffix
1299 if kind == b'relre':
1302 if kind == b'relre':
1300 if pat.startswith(b'^'):
1303 if pat.startswith(b'^'):
1301 return pat
1304 return pat
1302 return b'.*' + pat
1305 return b'.*' + pat
1303 if kind in (b'glob', b'rootglob'):
1306 if kind in (b'glob', b'rootglob'):
1304 return _globre(pat) + globsuffix
1307 return _globre(pat) + globsuffix
1305 raise error.ProgrammingError(b'not a regex pattern: %s:%s' % (kind, pat))
1308 raise error.ProgrammingError(b'not a regex pattern: %s:%s' % (kind, pat))
1306
1309
1307
1310
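One of the less obvious branches above is 'rootfilesin'; a small sketch of the produced expression, again using the stdlib re module and str literals for brevity:

import re

# 'rootfilesin:g/h' becomes (escaped) 'g/h' + '/[^/]+$': it matches files
# directly inside g/h but neither the directory itself nor anything deeper.
pat = re.escape('g/h') + '/[^/]+$'
assert re.search(pat, 'g/h/file.txt')
assert not re.search(pat, 'g/h/sub/file.txt')
assert not re.search(pat, 'g/h')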
1308 def _buildmatch(kindpats, globsuffix, root):
1311 def _buildmatch(kindpats, globsuffix, root):
1309 '''Return regexp string and a matcher function for kindpats.
1312 '''Return regexp string and a matcher function for kindpats.
1310 globsuffix is appended to the regexp of globs.'''
1313 globsuffix is appended to the regexp of globs.'''
1311 matchfuncs = []
1314 matchfuncs = []
1312
1315
1313 subincludes, kindpats = _expandsubinclude(kindpats, root)
1316 subincludes, kindpats = _expandsubinclude(kindpats, root)
1314 if subincludes:
1317 if subincludes:
1315 submatchers = {}
1318 submatchers = {}
1316
1319
1317 def matchsubinclude(f):
1320 def matchsubinclude(f):
1318 for prefix, matcherargs in subincludes:
1321 for prefix, matcherargs in subincludes:
1319 if f.startswith(prefix):
1322 if f.startswith(prefix):
1320 mf = submatchers.get(prefix)
1323 mf = submatchers.get(prefix)
1321 if mf is None:
1324 if mf is None:
1322 mf = match(*matcherargs)
1325 mf = match(*matcherargs)
1323 submatchers[prefix] = mf
1326 submatchers[prefix] = mf
1324
1327
1325 if mf(f[len(prefix) :]):
1328 if mf(f[len(prefix) :]):
1326 return True
1329 return True
1327 return False
1330 return False
1328
1331
1329 matchfuncs.append(matchsubinclude)
1332 matchfuncs.append(matchsubinclude)
1330
1333
1331 regex = b''
1334 regex = b''
1332 if kindpats:
1335 if kindpats:
1333 if all(k == b'rootfilesin' for k, p, s in kindpats):
1336 if all(k == b'rootfilesin' for k, p, s in kindpats):
1334 dirs = {p for k, p, s in kindpats}
1337 dirs = {p for k, p, s in kindpats}
1335
1338
1336 def mf(f):
1339 def mf(f):
1337 i = f.rfind(b'/')
1340 i = f.rfind(b'/')
1338 if i >= 0:
1341 if i >= 0:
1339 dir = f[:i]
1342 dir = f[:i]
1340 else:
1343 else:
1341 dir = b'.'
1344 dir = b'.'
1342 return dir in dirs
1345 return dir in dirs
1343
1346
1344 regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
1347 regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
1345 matchfuncs.append(mf)
1348 matchfuncs.append(mf)
1346 else:
1349 else:
1347 regex, mf = _buildregexmatch(kindpats, globsuffix)
1350 regex, mf = _buildregexmatch(kindpats, globsuffix)
1348 matchfuncs.append(mf)
1351 matchfuncs.append(mf)
1349
1352
1350 if len(matchfuncs) == 1:
1353 if len(matchfuncs) == 1:
1351 return regex, matchfuncs[0]
1354 return regex, matchfuncs[0]
1352 else:
1355 else:
1353 return regex, lambda f: any(mf(f) for mf in matchfuncs)
1356 return regex, lambda f: any(mf(f) for mf in matchfuncs)
1354
1357
1355
1358
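The 'rootfilesin'-only fast path above avoids regexes entirely; its directory-membership test looks like this in isolation (rootfilesin_match is an illustrative name):

def rootfilesin_match(dirs):
    # A file matches if its parent directory is one of the listed
    # directories, with b'.' standing in for the repository root.
    def mf(f):
        i = f.rfind(b'/')
        d = f[:i] if i >= 0 else b'.'
        return d in dirs
    return mf

mf = rootfilesin_match({b'g/h', b'.'})
assert mf(b'g/h/file.txt')
assert mf(b'top.txt')
assert not mf(b'g/h/sub/file.txt')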
1356 MAX_RE_SIZE = 20000
1359 MAX_RE_SIZE = 20000
1357
1360
1358
1361
1359 def _joinregexes(regexps):
1362 def _joinregexes(regexps):
1360 """gather multiple regular expressions into a single one"""
1363 """gather multiple regular expressions into a single one"""
1361 return b'|'.join(regexps)
1364 return b'|'.join(regexps)
1362
1365
1363
1366
1364 def _buildregexmatch(kindpats, globsuffix):
1367 def _buildregexmatch(kindpats, globsuffix):
1365 """Build a match function from a list of kinds and kindpats,
1368 """Build a match function from a list of kinds and kindpats,
1366 return regexp string and a matcher function.
1369 return regexp string and a matcher function.
1367
1370
1368 Test too large input
1371 Test too large input
1369 >>> _buildregexmatch([
1372 >>> _buildregexmatch([
1370 ... (b'relglob', b'?' * MAX_RE_SIZE, b'')
1373 ... (b'relglob', b'?' * MAX_RE_SIZE, b'')
1371 ... ], b'$')
1374 ... ], b'$')
1372 Traceback (most recent call last):
1375 Traceback (most recent call last):
1373 ...
1376 ...
1374 Abort: matcher pattern is too long (20009 bytes)
1377 Abort: matcher pattern is too long (20009 bytes)
1375 """
1378 """
1376 try:
1379 try:
1377 allgroups = []
1380 allgroups = []
1378 regexps = [_regex(k, p, globsuffix) for (k, p, s) in kindpats]
1381 regexps = [_regex(k, p, globsuffix) for (k, p, s) in kindpats]
1379 fullregexp = _joinregexes(regexps)
1382 fullregexp = _joinregexes(regexps)
1380
1383
1381 startidx = 0
1384 startidx = 0
1382 groupsize = 0
1385 groupsize = 0
1383 for idx, r in enumerate(regexps):
1386 for idx, r in enumerate(regexps):
1384 piecesize = len(r)
1387 piecesize = len(r)
1385 if piecesize > MAX_RE_SIZE:
1388 if piecesize > MAX_RE_SIZE:
1386 msg = _(b"matcher pattern is too long (%d bytes)") % piecesize
1389 msg = _(b"matcher pattern is too long (%d bytes)") % piecesize
1387 raise error.Abort(msg)
1390 raise error.Abort(msg)
1388 elif (groupsize + piecesize) > MAX_RE_SIZE:
1391 elif (groupsize + piecesize) > MAX_RE_SIZE:
1389 group = regexps[startidx:idx]
1392 group = regexps[startidx:idx]
1390 allgroups.append(_joinregexes(group))
1393 allgroups.append(_joinregexes(group))
1391 startidx = idx
1394 startidx = idx
1392 groupsize = 0
1395 groupsize = 0
1393 groupsize += piecesize + 1
1396 groupsize += piecesize + 1
1394
1397
1395 if startidx == 0:
1398 if startidx == 0:
1396 matcher = _rematcher(fullregexp)
1399 matcher = _rematcher(fullregexp)
1397 func = lambda s: bool(matcher(s))
1400 func = lambda s: bool(matcher(s))
1398 else:
1401 else:
1399 group = regexps[startidx:]
1402 group = regexps[startidx:]
1400 allgroups.append(_joinregexes(group))
1403 allgroups.append(_joinregexes(group))
1401 allmatchers = [_rematcher(g) for g in allgroups]
1404 allmatchers = [_rematcher(g) for g in allgroups]
1402 func = lambda s: any(m(s) for m in allmatchers)
1405 func = lambda s: any(m(s) for m in allmatchers)
1403 return fullregexp, func
1406 return fullregexp, func
1404 except re.error:
1407 except re.error:
1405 for k, p, s in kindpats:
1408 for k, p, s in kindpats:
1406 try:
1409 try:
1407 _rematcher(_regex(k, p, globsuffix))
1410 _rematcher(_regex(k, p, globsuffix))
1408 except re.error:
1411 except re.error:
1409 if s:
1412 if s:
1410 raise error.Abort(
1413 raise error.Abort(
1411 _(b"%s: invalid pattern (%s): %s") % (s, k, p)
1414 _(b"%s: invalid pattern (%s): %s") % (s, k, p)
1412 )
1415 )
1413 else:
1416 else:
1414 raise error.Abort(_(b"invalid pattern (%s): %s") % (k, p))
1417 raise error.Abort(_(b"invalid pattern (%s): %s") % (k, p))
1415 raise error.Abort(_(b"invalid pattern"))
1418 raise error.Abort(_(b"invalid pattern"))
1416
1419
1417
1420
1418 def _patternrootsanddirs(kindpats):
1421 def _patternrootsanddirs(kindpats):
1419 '''Returns roots and directories corresponding to each pattern.
1422 '''Returns roots and directories corresponding to each pattern.
1420
1423
1421 This calculates the roots and directories exactly matching the patterns and
1424 This calculates the roots and directories exactly matching the patterns and
1422 returns a tuple of (roots, dirs) for each. It does not return other
1425 returns a tuple of (roots, dirs) for each. It does not return other
1423 directories which may also need to be considered, like the parent
1426 directories which may also need to be considered, like the parent
1424 directories.
1427 directories.
1425 '''
1428 '''
1426 r = []
1429 r = []
1427 d = []
1430 d = []
1428 for kind, pat, source in kindpats:
1431 for kind, pat, source in kindpats:
1429 if kind in (b'glob', b'rootglob'): # find the non-glob prefix
1432 if kind in (b'glob', b'rootglob'): # find the non-glob prefix
1430 root = []
1433 root = []
1431 for p in pat.split(b'/'):
1434 for p in pat.split(b'/'):
1432 if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
1435 if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
1433 break
1436 break
1434 root.append(p)
1437 root.append(p)
1435 r.append(b'/'.join(root))
1438 r.append(b'/'.join(root))
1436 elif kind in (b'relpath', b'path'):
1439 elif kind in (b'relpath', b'path'):
1437 if pat == b'.':
1440 if pat == b'.':
1438 pat = b''
1441 pat = b''
1439 r.append(pat)
1442 r.append(pat)
1440 elif kind in (b'rootfilesin',):
1443 elif kind in (b'rootfilesin',):
1441 if pat == b'.':
1444 if pat == b'.':
1442 pat = b''
1445 pat = b''
1443 d.append(pat)
1446 d.append(pat)
1444 else: # relglob, re, relre
1447 else: # relglob, re, relre
1445 r.append(b'')
1448 r.append(b'')
1446 return r, d
1449 return r, d
1447
1450
1448
1451
1449 def _roots(kindpats):
1452 def _roots(kindpats):
1450 '''Returns root directories to match recursively from the given patterns.'''
1453 '''Returns root directories to match recursively from the given patterns.'''
1451 roots, dirs = _patternrootsanddirs(kindpats)
1454 roots, dirs = _patternrootsanddirs(kindpats)
1452 return roots
1455 return roots
1453
1456
1454
1457
1455 def _rootsdirsandparents(kindpats):
1458 def _rootsdirsandparents(kindpats):
1456 '''Returns roots and exact directories from patterns.
1459 '''Returns roots and exact directories from patterns.
1457
1460
1458 `roots` are directories to match recursively, `dirs` should
1461 `roots` are directories to match recursively, `dirs` should
1459 be matched non-recursively, and `parents` are the implicitly required
1462 be matched non-recursively, and `parents` are the implicitly required
1460 directories to walk to items in either roots or dirs.
1463 directories to walk to items in either roots or dirs.
1461
1464
1462 Returns a tuple of (roots, dirs, parents).
1465 Returns a tuple of (roots, dirs, parents).
1463
1466
1464 >>> r = _rootsdirsandparents(
1467 >>> r = _rootsdirsandparents(
1465 ... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
1468 ... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
1466 ... (b'glob', b'g*', b'')])
1469 ... (b'glob', b'g*', b'')])
1467 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1470 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1468 (['g/h', 'g/h', ''], []) ['', 'g']
1471 (['g/h', 'g/h', ''], []) ['', 'g']
1469 >>> r = _rootsdirsandparents(
1472 >>> r = _rootsdirsandparents(
1470 ... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
1473 ... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
1471 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1474 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1472 ([], ['g/h', '']) ['', 'g']
1475 ([], ['g/h', '']) ['', 'g']
1473 >>> r = _rootsdirsandparents(
1476 >>> r = _rootsdirsandparents(
1474 ... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
1477 ... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
1475 ... (b'path', b'', b'')])
1478 ... (b'path', b'', b'')])
1476 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1479 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1477 (['r', 'p/p', ''], []) ['', 'p']
1480 (['r', 'p/p', ''], []) ['', 'p']
1478 >>> r = _rootsdirsandparents(
1481 >>> r = _rootsdirsandparents(
1479 ... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
1482 ... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
1480 ... (b'relre', b'rr', b'')])
1483 ... (b'relre', b'rr', b'')])
1481 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1484 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1482 (['', '', ''], []) ['']
1485 (['', '', ''], []) ['']
1483 '''
1486 '''
1484 r, d = _patternrootsanddirs(kindpats)
1487 r, d = _patternrootsanddirs(kindpats)
1485
1488
1486 p = set()
1489 p = set()
1487 # Add the parents as non-recursive/exact directories, since they must be
1490 # Add the parents as non-recursive/exact directories, since they must be
1488 # scanned to get to either the roots or the other exact directories.
1491 # scanned to get to either the roots or the other exact directories.
1489 p.update(pathutil.dirs(d))
1492 p.update(pathutil.dirs(d))
1490 p.update(pathutil.dirs(r))
1493 p.update(pathutil.dirs(r))
1491
1494
1492 # FIXME: all uses of this function convert these to sets, do so before
1495 # FIXME: all uses of this function convert these to sets, do so before
1493 # returning.
1496 # returning.
1494 # FIXME: all uses of this function do not need anything in 'roots' and
1497 # FIXME: all uses of this function do not need anything in 'roots' and
1495 # 'dirs' to also be in 'parents', consider removing them before returning.
1498 # 'dirs' to also be in 'parents', consider removing them before returning.
1496 return r, d, p
1499 return r, d, p
1497
1500
1498
1501
1499 def _explicitfiles(kindpats):
1502 def _explicitfiles(kindpats):
1500 '''Returns the potential explicit filenames from the patterns.
1503 '''Returns the potential explicit filenames from the patterns.
1501
1504
1502 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1505 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1503 ['foo/bar']
1506 ['foo/bar']
1504 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1507 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1505 []
1508 []
1506 '''
1509 '''
1507 # Keep only the pattern kinds where one can specify filenames (vs only
1510 # Keep only the pattern kinds where one can specify filenames (vs only
1508 # directory names).
1511 # directory names).
1509 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
1512 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
1510 return _roots(filable)
1513 return _roots(filable)
1511
1514
1512
1515
1513 def _prefix(kindpats):
1516 def _prefix(kindpats):
1514 '''Whether all the patterns match a prefix (i.e. recursively)'''
1517 '''Whether all the patterns match a prefix (i.e. recursively)'''
1515 for kind, pat, source in kindpats:
1518 for kind, pat, source in kindpats:
1516 if kind not in (b'path', b'relpath'):
1519 if kind not in (b'path', b'relpath'):
1517 return False
1520 return False
1518 return True
1521 return True
1519
1522
1520
1523
1521 _commentre = None
1524 _commentre = None
1522
1525
1523
1526
1524 def readpatternfile(filepath, warn, sourceinfo=False):
1527 def readpatternfile(filepath, warn, sourceinfo=False):
1525 '''parse a pattern file, returning a list of
1528 '''parse a pattern file, returning a list of
1526 patterns. These patterns should be given to compile()
1529 patterns. These patterns should be given to compile()
1527 to be validated and converted into a match function.
1530 to be validated and converted into a match function.
1528
1531
1529 trailing white space is dropped.
1532 trailing white space is dropped.
1530 the escape character is backslash.
1533 the escape character is backslash.
1531 comments start with #.
1534 comments start with #.
1532 empty lines are skipped.
1535 empty lines are skipped.
1533
1536
1534 lines can be of the following formats:
1537 lines can be of the following formats:
1535
1538
1536 syntax: regexp # defaults following lines to non-rooted regexps
1539 syntax: regexp # defaults following lines to non-rooted regexps
1537 syntax: glob # defaults following lines to non-rooted globs
1540 syntax: glob # defaults following lines to non-rooted globs
1538 re:pattern # non-rooted regular expression
1541 re:pattern # non-rooted regular expression
1539 glob:pattern # non-rooted glob
1542 glob:pattern # non-rooted glob
1540 rootglob:pat # rooted glob (same root as ^ in regexps)
1543 rootglob:pat # rooted glob (same root as ^ in regexps)
1541 pattern # pattern of the current default type
1544 pattern # pattern of the current default type
1542
1545
1543 if sourceinfo is set, returns a list of tuples:
1546 if sourceinfo is set, returns a list of tuples:
1544 (pattern, lineno, originalline).
1547 (pattern, lineno, originalline).
1545 This is useful to debug ignore patterns.
1548 This is useful to debug ignore patterns.
1546 '''
1549 '''
1547
1550
1548 syntaxes = {
1551 syntaxes = {
1549 b're': b'relre:',
1552 b're': b'relre:',
1550 b'regexp': b'relre:',
1553 b'regexp': b'relre:',
1551 b'glob': b'relglob:',
1554 b'glob': b'relglob:',
1552 b'rootglob': b'rootglob:',
1555 b'rootglob': b'rootglob:',
1553 b'include': b'include',
1556 b'include': b'include',
1554 b'subinclude': b'subinclude',
1557 b'subinclude': b'subinclude',
1555 }
1558 }
1556 syntax = b'relre:'
1559 syntax = b'relre:'
1557 patterns = []
1560 patterns = []
1558
1561
1559 fp = open(filepath, b'rb')
1562 fp = open(filepath, b'rb')
1560 for lineno, line in enumerate(util.iterfile(fp), start=1):
1563 for lineno, line in enumerate(util.iterfile(fp), start=1):
1561 if b"#" in line:
1564 if b"#" in line:
1562 global _commentre
1565 global _commentre
1563 if not _commentre:
1566 if not _commentre:
1564 _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
1567 _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
1565 # remove comments prefixed by an even number of escapes
1568 # remove comments prefixed by an even number of escapes
1566 m = _commentre.search(line)
1569 m = _commentre.search(line)
1567 if m:
1570 if m:
1568 line = line[: m.end(1)]
1571 line = line[: m.end(1)]
1569 # fixup properly escaped comments that survived the above
1572 # fixup properly escaped comments that survived the above
1570 line = line.replace(b"\\#", b"#")
1573 line = line.replace(b"\\#", b"#")
1571 line = line.rstrip()
1574 line = line.rstrip()
1572 if not line:
1575 if not line:
1573 continue
1576 continue
1574
1577
1575 if line.startswith(b'syntax:'):
1578 if line.startswith(b'syntax:'):
1576 s = line[7:].strip()
1579 s = line[7:].strip()
1577 try:
1580 try:
1578 syntax = syntaxes[s]
1581 syntax = syntaxes[s]
1579 except KeyError:
1582 except KeyError:
1580 if warn:
1583 if warn:
1581 warn(
1584 warn(
1582 _(b"%s: ignoring invalid syntax '%s'\n") % (filepath, s)
1585 _(b"%s: ignoring invalid syntax '%s'\n") % (filepath, s)
1583 )
1586 )
1584 continue
1587 continue
1585
1588
1586 linesyntax = syntax
1589 linesyntax = syntax
1587 for s, rels in pycompat.iteritems(syntaxes):
1590 for s, rels in pycompat.iteritems(syntaxes):
1588 if line.startswith(rels):
1591 if line.startswith(rels):
1589 linesyntax = rels
1592 linesyntax = rels
1590 line = line[len(rels) :]
1593 line = line[len(rels) :]
1591 break
1594 break
1592 elif line.startswith(s + b':'):
1595 elif line.startswith(s + b':'):
1593 linesyntax = rels
1596 linesyntax = rels
1594 line = line[len(s) + 1 :]
1597 line = line[len(s) + 1 :]
1595 break
1598 break
1596 if sourceinfo:
1599 if sourceinfo:
1597 patterns.append((linesyntax + line, lineno, line))
1600 patterns.append((linesyntax + line, lineno, line))
1598 else:
1601 else:
1599 patterns.append(linesyntax + line)
1602 patterns.append(linesyntax + line)
1600 fp.close()
1603 fp.close()
1601 return patterns
1604 return patterns
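A hedged usage sketch for readpatternfile above, assuming an installed Mercurial so that this module is importable as mercurial.match; the file contents follow the syntax table from the docstring:

import os
import tempfile

from mercurial import match as matchmod

fd, path = tempfile.mkstemp()
os.write(fd, b"syntax: glob\n"
             b"*.o\n"
             b"re:^build/\n")
os.close(fd)
try:
    patterns = matchmod.readpatternfile(path, warn=None)
    # per the syntax table above, this should yield:
    #   [b'relglob:*.o', b'relre:^build/']
    print(patterns)
finally:
    os.remove(path)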
@@ -1,1177 +1,1182 b''
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extdiff]
2 > [extdiff]
3 > # for portability:
3 > # for portability:
4 > pdiff = sh "$RUNTESTDIR/pdiff"
4 > pdiff = sh "$RUNTESTDIR/pdiff"
5 > [progress]
5 > [progress]
6 > disable=False
6 > disable=False
7 > assume-tty = 1
7 > assume-tty = 1
8 > delay = 0
8 > delay = 0
9 > # set changedelay really large so we don't see nested topics
9 > # set changedelay really large so we don't see nested topics
10 > changedelay = 30000
10 > changedelay = 30000
11 > format = topic bar number
11 > format = topic bar number
12 > refresh = 0
12 > refresh = 0
13 > width = 60
13 > width = 60
14 > EOF
14 > EOF
15
15
16 Preparing the subrepository 'sub2'
16 Preparing the subrepository 'sub2'
17
17
18 $ hg init sub2
18 $ hg init sub2
19 $ echo sub2 > sub2/sub2
19 $ echo sub2 > sub2/sub2
20 $ hg add -R sub2
20 $ hg add -R sub2
21 adding sub2/sub2
21 adding sub2/sub2
22 $ hg commit -R sub2 -m "sub2 import"
22 $ hg commit -R sub2 -m "sub2 import"
23
23
24 Preparing the 'sub1' repo which depends on the subrepo 'sub2'
24 Preparing the 'sub1' repo which depends on the subrepo 'sub2'
25
25
26 $ hg init sub1
26 $ hg init sub1
27 $ echo sub1 > sub1/sub1
27 $ echo sub1 > sub1/sub1
28 $ echo "sub2 = ../sub2" > sub1/.hgsub
28 $ echo "sub2 = ../sub2" > sub1/.hgsub
29 $ hg clone sub2 sub1/sub2
29 $ hg clone sub2 sub1/sub2
30 \r (no-eol) (esc)
30 \r (no-eol) (esc)
31 linking [ <=> ] 1\r (no-eol) (esc)
31 linking [ <=> ] 1\r (no-eol) (esc)
32 linking [ <=> ] 2\r (no-eol) (esc)
32 linking [ <=> ] 2\r (no-eol) (esc)
33 linking [ <=> ] 3\r (no-eol) (esc)
33 linking [ <=> ] 3\r (no-eol) (esc)
34 linking [ <=> ] 4\r (no-eol) (esc)
34 linking [ <=> ] 4\r (no-eol) (esc)
35 linking [ <=> ] 5\r (no-eol) (esc)
35 linking [ <=> ] 5\r (no-eol) (esc)
36 linking [ <=> ] 6\r (no-eol) (esc)
36 linking [ <=> ] 6\r (no-eol) (esc)
37 \r (no-eol) (esc)
37 \r (no-eol) (esc)
38 \r (no-eol) (esc)
38 \r (no-eol) (esc)
39 updating [===========================================>] 1/1\r (no-eol) (esc)
39 updating [===========================================>] 1/1\r (no-eol) (esc)
40 \r (no-eol) (esc)
40 \r (no-eol) (esc)
41 updating to branch default
41 updating to branch default
42 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 $ hg add -R sub1
43 $ hg add -R sub1
44 adding sub1/.hgsub
44 adding sub1/.hgsub
45 adding sub1/sub1
45 adding sub1/sub1
46 $ hg commit -R sub1 -m "sub1 import"
46 $ hg commit -R sub1 -m "sub1 import"
47
47
48 Preparing the 'main' repo which depends on the subrepo 'sub1'
48 Preparing the 'main' repo which depends on the subrepo 'sub1'
49
49
50 $ hg init main
50 $ hg init main
51 $ echo main > main/main
51 $ echo main > main/main
52 $ echo "sub1 = ../sub1" > main/.hgsub
52 $ echo "sub1 = ../sub1" > main/.hgsub
53 $ hg clone sub1 main/sub1
53 $ hg clone sub1 main/sub1
54 \r (no-eol) (esc)
54 \r (no-eol) (esc)
55 linking [ <=> ] 1\r (no-eol) (esc)
55 linking [ <=> ] 1\r (no-eol) (esc)
56 linking [ <=> ] 2\r (no-eol) (esc)
56 linking [ <=> ] 2\r (no-eol) (esc)
57 linking [ <=> ] 3\r (no-eol) (esc)
57 linking [ <=> ] 3\r (no-eol) (esc)
58 linking [ <=> ] 4\r (no-eol) (esc)
58 linking [ <=> ] 4\r (no-eol) (esc)
59 linking [ <=> ] 5\r (no-eol) (esc)
59 linking [ <=> ] 5\r (no-eol) (esc)
60 linking [ <=> ] 6\r (no-eol) (esc)
60 linking [ <=> ] 6\r (no-eol) (esc)
61 linking [ <=> ] 7\r (no-eol) (esc)
61 linking [ <=> ] 7\r (no-eol) (esc)
62 linking [ <=> ] 8\r (no-eol) (esc)
62 linking [ <=> ] 8\r (no-eol) (esc)
63 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
63 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
64 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
64 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
65 \r (no-eol) (esc)
65 \r (no-eol) (esc)
66 \r (no-eol) (esc)
66 \r (no-eol) (esc)
67 updating [===========================================>] 3/3\r (no-eol) (esc)
67 updating [===========================================>] 3/3\r (no-eol) (esc)
68 \r (no-eol) (esc)
68 \r (no-eol) (esc)
69 \r (no-eol) (esc)
69 \r (no-eol) (esc)
70 linking [ <=> ] 1\r (no-eol) (esc)
70 linking [ <=> ] 1\r (no-eol) (esc)
71 linking [ <=> ] 2\r (no-eol) (esc)
71 linking [ <=> ] 2\r (no-eol) (esc)
72 linking [ <=> ] 3\r (no-eol) (esc)
72 linking [ <=> ] 3\r (no-eol) (esc)
73 linking [ <=> ] 4\r (no-eol) (esc)
73 linking [ <=> ] 4\r (no-eol) (esc)
74 linking [ <=> ] 5\r (no-eol) (esc)
74 linking [ <=> ] 5\r (no-eol) (esc)
75 linking [ <=> ] 6\r (no-eol) (esc)
75 linking [ <=> ] 6\r (no-eol) (esc)
76 updating [===========================================>] 1/1\r (no-eol) (esc)
76 updating [===========================================>] 1/1\r (no-eol) (esc)
77 \r (no-eol) (esc)
77 \r (no-eol) (esc)
78 updating to branch default
78 updating to branch default
79 cloning subrepo sub2 from $TESTTMP/sub2
79 cloning subrepo sub2 from $TESTTMP/sub2
80 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
80 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
81 $ hg add -R main
81 $ hg add -R main
82 adding main/.hgsub
82 adding main/.hgsub
83 adding main/main
83 adding main/main
84 $ hg commit -R main -m "main import"
84 $ hg commit -R main -m "main import"
85
85
86 #if serve
86 #if serve
87
87
88 Unfortunately, subrepos not at their nominal location cannot be cloned. But
88 Unfortunately, subrepos not at their nominal location cannot be cloned. But
89 they are still served from their location within the local repository. The only
89 they are still served from their location within the local repository. The only
90 reason why 'main' can be cloned via the filesystem is because 'sub1' and 'sub2'
90 reason why 'main' can be cloned via the filesystem is because 'sub1' and 'sub2'
91 are also available as siblings of 'main'.
91 are also available as siblings of 'main'.
92
92
93 $ hg serve -R main --debug -S -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log
93 $ hg serve -R main --debug -S -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log
94 adding = $TESTTMP/main
94 adding = $TESTTMP/main
95 adding sub1 = $TESTTMP/main/sub1
95 adding sub1 = $TESTTMP/main/sub1
96 adding sub1/sub2 = $TESTTMP/main/sub1/sub2
96 adding sub1/sub2 = $TESTTMP/main/sub1/sub2
97 listening at http://*:$HGPORT/ (bound to *:$HGPORT) (glob) (?)
97 listening at http://*:$HGPORT/ (bound to *:$HGPORT) (glob) (?)
98 adding = $TESTTMP/main (?)
98 adding = $TESTTMP/main (?)
99 adding sub1 = $TESTTMP/main/sub1 (?)
99 adding sub1 = $TESTTMP/main/sub1 (?)
100 adding sub1/sub2 = $TESTTMP/main/sub1/sub2 (?)
100 adding sub1/sub2 = $TESTTMP/main/sub1/sub2 (?)
101 $ cat hg1.pid >> $DAEMON_PIDS
101 $ cat hg1.pid >> $DAEMON_PIDS
102
102
103 $ hg clone http://localhost:$HGPORT httpclone --config progress.disable=True
103 $ hg clone http://localhost:$HGPORT httpclone --config progress.disable=True
104 requesting all changes
104 requesting all changes
105 adding changesets
105 adding changesets
106 adding manifests
106 adding manifests
107 adding file changes
107 adding file changes
108 added 1 changesets with 3 changes to 3 files
108 added 1 changesets with 3 changes to 3 files
109 new changesets 7f491f53a367
109 new changesets 7f491f53a367
110 updating to branch default
110 updating to branch default
111 cloning subrepo sub1 from http://localhost:$HGPORT/../sub1
111 cloning subrepo sub1 from http://localhost:$HGPORT/../sub1
112 abort: HTTP Error 404: Not Found
112 abort: HTTP Error 404: Not Found
113 [255]
113 [255]
114
114
115 $ cat access.log
115 $ cat access.log
116 * "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
116 * "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
117 * "GET /?cmd=batch HTTP/1.1" 200 - * (glob)
117 * "GET /?cmd=batch HTTP/1.1" 200 - * (glob)
118 * "GET /?cmd=getbundle HTTP/1.1" 200 - * (glob)
118 * "GET /?cmd=getbundle HTTP/1.1" 200 - * (glob)
119 * "GET /../sub1?cmd=capabilities HTTP/1.1" 404 - (glob)
119 * "GET /../sub1?cmd=capabilities HTTP/1.1" 404 - (glob)
120 $ cat error.log
120 $ cat error.log
121
121
122 $ killdaemons.py
122 $ killdaemons.py
123 $ rm hg1.pid error.log access.log
123 $ rm hg1.pid error.log access.log
124 #endif
124 #endif
125
125
126 Cleaning both repositories, just as a clone -U
126 Cleaning both repositories, just as a clone -U
127
127
128 $ hg up -C -R sub2 null
128 $ hg up -C -R sub2 null
129 \r (no-eol) (esc)
129 \r (no-eol) (esc)
130 updating [===========================================>] 1/1\r (no-eol) (esc)
130 updating [===========================================>] 1/1\r (no-eol) (esc)
131 \r (no-eol) (esc)
131 \r (no-eol) (esc)
132 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
132 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
133 $ hg up -C -R sub1 null
133 $ hg up -C -R sub1 null
134 \r (no-eol) (esc)
134 \r (no-eol) (esc)
135 updating [===========================================>] 1/1\r (no-eol) (esc)
135 updating [===========================================>] 1/1\r (no-eol) (esc)
136 \r (no-eol) (esc)
136 \r (no-eol) (esc)
137 \r (no-eol) (esc)
137 \r (no-eol) (esc)
138 updating [===========================================>] 3/3\r (no-eol) (esc)
138 updating [===========================================>] 3/3\r (no-eol) (esc)
139 \r (no-eol) (esc)
139 \r (no-eol) (esc)
140 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
140 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
141 $ hg up -C -R main null
141 $ hg up -C -R main null
142 \r (no-eol) (esc)
142 \r (no-eol) (esc)
143 updating [===========================================>] 1/1\r (no-eol) (esc)
143 updating [===========================================>] 1/1\r (no-eol) (esc)
144 \r (no-eol) (esc)
144 \r (no-eol) (esc)
145 \r (no-eol) (esc)
145 \r (no-eol) (esc)
146 updating [===========================================>] 3/3\r (no-eol) (esc)
146 updating [===========================================>] 3/3\r (no-eol) (esc)
147 \r (no-eol) (esc)
147 \r (no-eol) (esc)
148 \r (no-eol) (esc)
148 \r (no-eol) (esc)
149 updating [===========================================>] 3/3\r (no-eol) (esc)
149 updating [===========================================>] 3/3\r (no-eol) (esc)
150 \r (no-eol) (esc)
150 \r (no-eol) (esc)
151 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
151 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
152 $ rm -rf main/sub1
152 $ rm -rf main/sub1
153 $ rm -rf sub1/sub2
153 $ rm -rf sub1/sub2
154
154
155 Clone main
155 Clone main
156
156
157 $ hg --config extensions.largefiles= clone main cloned
157 $ hg --config extensions.largefiles= clone main cloned
158 \r (no-eol) (esc)
158 \r (no-eol) (esc)
159 linking [ <=> ] 1\r (no-eol) (esc)
159 linking [ <=> ] 1\r (no-eol) (esc)
160 linking [ <=> ] 2\r (no-eol) (esc)
160 linking [ <=> ] 2\r (no-eol) (esc)
161 linking [ <=> ] 3\r (no-eol) (esc)
161 linking [ <=> ] 3\r (no-eol) (esc)
162 linking [ <=> ] 4\r (no-eol) (esc)
162 linking [ <=> ] 4\r (no-eol) (esc)
163 linking [ <=> ] 5\r (no-eol) (esc)
163 linking [ <=> ] 5\r (no-eol) (esc)
164 linking [ <=> ] 6\r (no-eol) (esc)
164 linking [ <=> ] 6\r (no-eol) (esc)
165 linking [ <=> ] 7\r (no-eol) (esc)
165 linking [ <=> ] 7\r (no-eol) (esc)
166 linking [ <=> ] 8\r (no-eol) (esc)
166 linking [ <=> ] 8\r (no-eol) (esc)
167 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
167 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
168 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
168 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
169 \r (no-eol) (esc)
169 \r (no-eol) (esc)
170 \r (no-eol) (esc)
170 \r (no-eol) (esc)
171 updating [===========================================>] 3/3\r (no-eol) (esc)
171 updating [===========================================>] 3/3\r (no-eol) (esc)
172 \r (no-eol) (esc)
172 \r (no-eol) (esc)
173 \r (no-eol) (esc)
173 \r (no-eol) (esc)
174 linking [ <=> ] 1\r (no-eol) (esc)
174 linking [ <=> ] 1\r (no-eol) (esc)
175 linking [ <=> ] 2\r (no-eol) (esc)
175 linking [ <=> ] 2\r (no-eol) (esc)
176 linking [ <=> ] 3\r (no-eol) (esc)
176 linking [ <=> ] 3\r (no-eol) (esc)
177 linking [ <=> ] 4\r (no-eol) (esc)
177 linking [ <=> ] 4\r (no-eol) (esc)
178 linking [ <=> ] 5\r (no-eol) (esc)
178 linking [ <=> ] 5\r (no-eol) (esc)
179 linking [ <=> ] 6\r (no-eol) (esc)
179 linking [ <=> ] 6\r (no-eol) (esc)
180 linking [ <=> ] 7\r (no-eol) (esc)
180 linking [ <=> ] 7\r (no-eol) (esc)
181 linking [ <=> ] 8\r (no-eol) (esc)
181 linking [ <=> ] 8\r (no-eol) (esc)
182 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
182 linking [ <=> ] 9\r (no-eol) (esc) (reposimplestore !)
183 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
183 linking [ <=> ] 10\r (no-eol) (esc) (reposimplestore !)
184 updating [===========================================>] 3/3\r (no-eol) (esc)
184 updating [===========================================>] 3/3\r (no-eol) (esc)
185 \r (no-eol) (esc)
185 \r (no-eol) (esc)
186 \r (no-eol) (esc)
186 \r (no-eol) (esc)
187 linking [ <=> ] 1\r (no-eol) (esc) (reporevlogstore !)
187 linking [ <=> ] 1\r (no-eol) (esc) (reporevlogstore !)
188 linking [ <=> ] 2\r (no-eol) (esc) (reporevlogstore !)
188 linking [ <=> ] 2\r (no-eol) (esc) (reporevlogstore !)
189 linking [ <=> ] 3\r (no-eol) (esc) (reporevlogstore !)
189 linking [ <=> ] 3\r (no-eol) (esc) (reporevlogstore !)
190 linking [ <=> ] 4\r (no-eol) (esc) (reporevlogstore !)
190 linking [ <=> ] 4\r (no-eol) (esc) (reporevlogstore !)
191 linking [ <=> ] 5\r (no-eol) (esc) (reporevlogstore !)
191 linking [ <=> ] 5\r (no-eol) (esc) (reporevlogstore !)
192 linking [ <=> ] 6\r (no-eol) (esc) (reporevlogstore !)
192 linking [ <=> ] 6\r (no-eol) (esc) (reporevlogstore !)
193 linking [ <=> ] 1\r (no-eol) (esc) (reposimplestore !)
193 linking [ <=> ] 1\r (no-eol) (esc) (reposimplestore !)
194 linking [ <=> ] 2\r (no-eol) (esc) (reposimplestore !)
194 linking [ <=> ] 2\r (no-eol) (esc) (reposimplestore !)
195 linking [ <=> ] 3\r (no-eol) (esc) (reposimplestore !)
195 linking [ <=> ] 3\r (no-eol) (esc) (reposimplestore !)
196 linking [ <=> ] 4\r (no-eol) (esc) (reposimplestore !)
196 linking [ <=> ] 4\r (no-eol) (esc) (reposimplestore !)
197 linking [ <=> ] 5\r (no-eol) (esc) (reposimplestore !)
197 linking [ <=> ] 5\r (no-eol) (esc) (reposimplestore !)
198 linking [ <=> ] 6\r (no-eol) (esc) (reposimplestore !)
198 linking [ <=> ] 6\r (no-eol) (esc) (reposimplestore !)
199 updating [===========================================>] 1/1\r (no-eol) (esc)
199 updating [===========================================>] 1/1\r (no-eol) (esc)
200 \r (no-eol) (esc)
200 \r (no-eol) (esc)
201 updating to branch default
201 updating to branch default
202 cloning subrepo sub1 from $TESTTMP/sub1
202 cloning subrepo sub1 from $TESTTMP/sub1
203 cloning subrepo sub1/sub2 from $TESTTMP/sub2
203 cloning subrepo sub1/sub2 from $TESTTMP/sub2
204 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
205
205
206 Largefiles is NOT enabled in the clone if the source repo doesn't require it
206 Largefiles is NOT enabled in the clone if the source repo doesn't require it
207 $ grep largefiles cloned/.hg/hgrc
207 $ grep largefiles cloned/.hg/hgrc
208 [1]
208 [1]
209
209
210 Checking cloned repo ids
210 Checking cloned repo ids
211
211
212 $ printf "cloned " ; hg id -R cloned
212 $ printf "cloned " ; hg id -R cloned
213 cloned 7f491f53a367 tip
213 cloned 7f491f53a367 tip
214 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
214 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
215 cloned/sub1 fc3b4ce2696f tip
215 cloned/sub1 fc3b4ce2696f tip
216 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
216 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
217 cloned/sub1/sub2 c57a0840e3ba tip
217 cloned/sub1/sub2 c57a0840e3ba tip
218
218
219 debugsub output for main and sub1
219 debugsub output for main and sub1
220
220
221 $ hg debugsub -R cloned
221 $ hg debugsub -R cloned
222 path sub1
222 path sub1
223 source ../sub1
223 source ../sub1
224 revision fc3b4ce2696f7741438c79207583768f2ce6b0dd
224 revision fc3b4ce2696f7741438c79207583768f2ce6b0dd
225 $ hg debugsub -R cloned/sub1
225 $ hg debugsub -R cloned/sub1
226 path sub2
226 path sub2
227 source ../sub2
227 source ../sub2
228 revision c57a0840e3badd667ef3c3ef65471609acb2ba3c
228 revision c57a0840e3badd667ef3c3ef65471609acb2ba3c
229
229
230 Modifying deeply nested 'sub2'
230 Modifying deeply nested 'sub2'
231
231
232 $ echo modified > cloned/sub1/sub2/sub2
232 $ echo modified > cloned/sub1/sub2/sub2
233 $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
233 $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
234 committing subrepository sub1
234 committing subrepository sub1
235 committing subrepository sub1/sub2
235 committing subrepository sub1/sub2
236
236
237 Checking modified node ids
237 Checking modified node ids
238
238
239 $ printf "cloned " ; hg id -R cloned
239 $ printf "cloned " ; hg id -R cloned
240 cloned ffe6649062fe tip
240 cloned ffe6649062fe tip
241 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
241 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
242 cloned/sub1 2ecb03bf44a9 tip
242 cloned/sub1 2ecb03bf44a9 tip
243 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
243 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
244 cloned/sub1/sub2 53dd3430bcaf tip
244 cloned/sub1/sub2 53dd3430bcaf tip
245
245
246 debugsub output for main and sub1
246 debugsub output for main and sub1
247
247
248 $ hg debugsub -R cloned
248 $ hg debugsub -R cloned
249 path sub1
249 path sub1
250 source ../sub1
250 source ../sub1
251 revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9
251 revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9
252 $ hg debugsub -R cloned/sub1
252 $ hg debugsub -R cloned/sub1
253 path sub2
253 path sub2
254 source ../sub2
254 source ../sub2
255 revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487
255 revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487
256
256
257 Check that deep archiving works
257 Check that deep archiving works
258
258
259 $ cd cloned
259 $ cd cloned
260 $ echo 'test' > sub1/sub2/test.txt
260 $ echo 'test' > sub1/sub2/test.txt
261 $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
261 $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
262 $ mkdir sub1/sub2/folder
262 $ mkdir sub1/sub2/folder
263 $ echo 'subfolder' > sub1/sub2/folder/test.txt
263 $ echo 'subfolder' > sub1/sub2/folder/test.txt
264 $ hg ci -ASm "add test.txt"
264 $ hg ci -ASm "add test.txt"
265 adding sub1/sub2/folder/test.txt
265 adding sub1/sub2/folder/test.txt
266 committing subrepository sub1
266 committing subrepository sub1
267 committing subrepository sub1/sub2
267 committing subrepository sub1/sub2
268
268
269 $ rm -r main
269 $ rm -r main
270 $ hg archive -S -qr 'wdir()' ../wdir
270 $ hg archive -S -qr 'wdir()' ../wdir
271 $ cat ../wdir/.hg_archival.txt
271 $ cat ../wdir/.hg_archival.txt
272 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
272 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
273 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
273 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
274 branch: default
274 branch: default
275 latesttag: null
275 latesttag: null
276 latesttagdistance: 4
276 latesttagdistance: 4
277 changessincelatesttag: 4
277 changessincelatesttag: 4
278 $ hg update -Cq .
278 $ hg update -Cq .
279
279
280 A deleted subrepo file is flagged as dirty, like the top level repo
280 A deleted subrepo file is flagged as dirty, like the top level repo
281
281
282 $ rm -r ../wdir sub1/sub2/folder/test.txt
282 $ rm -r ../wdir sub1/sub2/folder/test.txt
283 $ hg archive -S -qr 'wdir()' ../wdir
283 $ hg archive -S -qr 'wdir()' ../wdir
284 $ cat ../wdir/.hg_archival.txt
284 $ cat ../wdir/.hg_archival.txt
285 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
285 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
286 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
286 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
287 branch: default
287 branch: default
288 latesttag: null
288 latesttag: null
289 latesttagdistance: 4
289 latesttagdistance: 4
290 changessincelatesttag: 4
290 changessincelatesttag: 4
291 $ hg update -Cq .
291 $ hg update -Cq .
292 $ rm -r ../wdir
292 $ rm -r ../wdir
293
293
294 $ hg archive -S -qr 'wdir()' ../wdir \
294 $ hg archive -S -qr 'wdir()' ../wdir \
295 > --config 'experimental.archivemetatemplate=archived {node|short}\n'
295 > --config 'experimental.archivemetatemplate=archived {node|short}\n'
296 $ cat ../wdir/.hg_archival.txt
296 $ cat ../wdir/.hg_archival.txt
297 archived ffffffffffff
297 archived ffffffffffff
298 $ rm -r ../wdir
298 $ rm -r ../wdir
299
299
300 .. but first take a detour through some deep removal testing
300 .. but first take a detour through some deep removal testing
301
301
302 $ hg remove -S -I 're:.*.txt' .
302 $ hg remove -S -I 're:.*.txt' .
303 \r (no-eol) (esc)
303 \r (no-eol) (esc)
304 searching [==========================================>] 1/1\r (no-eol) (esc)
304 searching [==========================================>] 1/1\r (no-eol) (esc)
305 searching [==========================================>] 1/1\r (no-eol) (esc)
305 searching [==========================================>] 1/1\r (no-eol) (esc)
306 \r (no-eol) (esc)
306 \r (no-eol) (esc)
307 \r (no-eol) (esc)
307 \r (no-eol) (esc)
308 deleting [=====================> ] 1/2\r (no-eol) (esc)
308 deleting [=====================> ] 1/2\r (no-eol) (esc)
309 \r (no-eol) (esc)
309 \r (no-eol) (esc)
310 \r (no-eol) (esc)
310 \r (no-eol) (esc)
311 deleting [===========================================>] 2/2\r (no-eol) (esc)
311 deleting [===========================================>] 2/2\r (no-eol) (esc)
312 \r (no-eol) (esc)
312 \r (no-eol) (esc)
313 removing sub1/sub2/folder/test.txt
313 removing sub1/sub2/folder/test.txt
314 removing sub1/sub2/test.txt
314 removing sub1/sub2/test.txt
315 $ hg status -S
315 $ hg status -S
316 R sub1/sub2/folder/test.txt
316 R sub1/sub2/folder/test.txt
317 R sub1/sub2/test.txt
317 R sub1/sub2/test.txt
318 $ hg update -Cq
318 $ hg update -Cq
319 $ hg remove -I 're:.*.txt' sub1
319 $ hg remove -I 're:.*.txt' sub1
320 \r (no-eol) (esc)
320 \r (no-eol) (esc)
321 searching [==========================================>] 1/1\r (no-eol) (esc)
321 searching [==========================================>] 1/1\r (no-eol) (esc)
322 \r (no-eol) (esc)
322 \r (no-eol) (esc)
323 \r (no-eol) (esc)
323 \r (no-eol) (esc)
324 deleting [===========================================>] 1/1\r (no-eol) (esc)
324 deleting [===========================================>] 1/1\r (no-eol) (esc)
325 \r (no-eol) (esc)
325 \r (no-eol) (esc)
326 $ hg status -S
326 $ hg status -S
327 $ hg remove sub1/sub2/folder/test.txt
327 $ hg remove sub1/sub2/folder/test.txt
328 \r (no-eol) (esc)
328 \r (no-eol) (esc)
329 searching [==========================================>] 1/1\r (no-eol) (esc)
329 searching [==========================================>] 1/1\r (no-eol) (esc)
330 searching [==========================================>] 1/1\r (no-eol) (esc)
330 searching [==========================================>] 1/1\r (no-eol) (esc)
331 \r (no-eol) (esc)
331 \r (no-eol) (esc)
332 \r (no-eol) (esc)
332 \r (no-eol) (esc)
333 deleting [===========================================>] 1/1\r (no-eol) (esc)
333 deleting [===========================================>] 1/1\r (no-eol) (esc)
334 \r (no-eol) (esc)
334 \r (no-eol) (esc)
335 \r (no-eol) (esc)
335 \r (no-eol) (esc)
336 deleting [===========================================>] 1/1\r (no-eol) (esc)
336 deleting [===========================================>] 1/1\r (no-eol) (esc)
337 \r (no-eol) (esc)
337 \r (no-eol) (esc)
338 \r (no-eol) (esc)
338 \r (no-eol) (esc)
339 deleting [===========================================>] 1/1\r (no-eol) (esc)
339 deleting [===========================================>] 1/1\r (no-eol) (esc)
340 \r (no-eol) (esc)
340 \r (no-eol) (esc)
341 $ hg remove sub1/.hgsubstate
341 $ hg remove sub1/.hgsubstate
342 \r (no-eol) (esc)
342 \r (no-eol) (esc)
343 searching [==========================================>] 1/1\r (no-eol) (esc)
343 searching [==========================================>] 1/1\r (no-eol) (esc)
344 \r (no-eol) (esc)
344 \r (no-eol) (esc)
345 \r (no-eol) (esc)
345 \r (no-eol) (esc)
346 deleting [===========================================>] 1/1\r (no-eol) (esc)
346 deleting [===========================================>] 1/1\r (no-eol) (esc)
347 \r (no-eol) (esc)
347 \r (no-eol) (esc)
348 \r (no-eol) (esc)
348 \r (no-eol) (esc)
349 deleting [===========================================>] 1/1\r (no-eol) (esc)
349 deleting [===========================================>] 1/1\r (no-eol) (esc)
350 \r (no-eol) (esc)
350 \r (no-eol) (esc)
351 $ mv sub1/.hgsub sub1/x.hgsub
351 $ mv sub1/.hgsub sub1/x.hgsub
352 $ hg status -S
352 $ hg status -S
353 warning: subrepo spec file 'sub1/.hgsub' not found
353 warning: subrepo spec file 'sub1/.hgsub' not found
354 R sub1/.hgsubstate
354 R sub1/.hgsubstate
355 R sub1/sub2/folder/test.txt
355 R sub1/sub2/folder/test.txt
356 ! sub1/.hgsub
356 ! sub1/.hgsub
357 ? sub1/x.hgsub
357 ? sub1/x.hgsub
358 $ hg status -R sub1
359 warning: subrepo spec file 'sub1/.hgsub' not found
360 R .hgsubstate
361 ! .hgsub
362 ? x.hgsub
358 $ mv sub1/x.hgsub sub1/.hgsub
363 $ mv sub1/x.hgsub sub1/.hgsub
359 $ hg update -Cq
364 $ hg update -Cq
360 $ touch sub1/foo
365 $ touch sub1/foo
361 $ hg forget sub1/sub2/folder/test.txt
366 $ hg forget sub1/sub2/folder/test.txt
362 $ rm sub1/sub2/test.txt
367 $ rm sub1/sub2/test.txt
363
368
364 Test relative path printing + subrepos
369 Test relative path printing + subrepos
365 $ mkdir -p foo/bar
370 $ mkdir -p foo/bar
366 $ cd foo
371 $ cd foo
367 $ touch bar/abc
372 $ touch bar/abc
368 $ hg addremove -S ..
373 $ hg addremove -S ..
369 \r (no-eol) (esc)
374 \r (no-eol) (esc)
370 searching for exact renames [========================>] 1/1\r (no-eol) (esc)
375 searching for exact renames [========================>] 1/1\r (no-eol) (esc)
371 \r (no-eol) (esc)
376 \r (no-eol) (esc)
372 adding ../sub1/sub2/folder/test.txt
377 adding ../sub1/sub2/folder/test.txt
373 removing ../sub1/sub2/test.txt
378 removing ../sub1/sub2/test.txt
374 adding ../sub1/foo
379 adding ../sub1/foo
375 adding bar/abc
380 adding bar/abc
376 $ cd ..
381 $ cd ..
377 $ hg status -S
382 $ hg status -S
378 A foo/bar/abc
383 A foo/bar/abc
379 A sub1/foo
384 A sub1/foo
380 R sub1/sub2/test.txt
385 R sub1/sub2/test.txt
381
386
382 Archive wdir() with subrepos
387 Archive wdir() with subrepos
383 $ hg rm main
388 $ hg rm main
384 \r (no-eol) (esc)
389 \r (no-eol) (esc)
385 deleting [===========================================>] 1/1\r (no-eol) (esc)
390 deleting [===========================================>] 1/1\r (no-eol) (esc)
386 \r (no-eol) (esc)
391 \r (no-eol) (esc)
387 $ hg archive -S -r 'wdir()' ../wdir
392 $ hg archive -S -r 'wdir()' ../wdir
388 \r (no-eol) (esc)
393 \r (no-eol) (esc)
389 archiving [ ] 0/3\r (no-eol) (esc)
394 archiving [ ] 0/3\r (no-eol) (esc)
390 archiving [=============> ] 1/3\r (no-eol) (esc)
395 archiving [=============> ] 1/3\r (no-eol) (esc)
391 archiving [===========================> ] 2/3\r (no-eol) (esc)
396 archiving [===========================> ] 2/3\r (no-eol) (esc)
392 archiving [==========================================>] 3/3\r (no-eol) (esc)
397 archiving [==========================================>] 3/3\r (no-eol) (esc)
393 \r (no-eol) (esc)
398 \r (no-eol) (esc)
394 \r (no-eol) (esc)
399 \r (no-eol) (esc)
395 archiving (sub1) [ ] 0/4\r (no-eol) (esc)
400 archiving (sub1) [ ] 0/4\r (no-eol) (esc)
396 archiving (sub1) [========> ] 1/4\r (no-eol) (esc)
401 archiving (sub1) [========> ] 1/4\r (no-eol) (esc)
397 archiving (sub1) [=================> ] 2/4\r (no-eol) (esc)
402 archiving (sub1) [=================> ] 2/4\r (no-eol) (esc)
398 archiving (sub1) [==========================> ] 3/4\r (no-eol) (esc)
403 archiving (sub1) [==========================> ] 3/4\r (no-eol) (esc)
399 archiving (sub1) [===================================>] 4/4\r (no-eol) (esc)
404 archiving (sub1) [===================================>] 4/4\r (no-eol) (esc)
400 \r (no-eol) (esc)
405 \r (no-eol) (esc)
401 \r (no-eol) (esc)
406 \r (no-eol) (esc)
402 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
407 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
403 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
408 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
404 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
409 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
405 \r (no-eol) (esc)
410 \r (no-eol) (esc)
406 $ diff -r . ../wdir | egrep -v '\.hg$|^Common subdirectories:'
411 $ diff -r . ../wdir | egrep -v '\.hg$|^Common subdirectories:'
407 Only in ../wdir: .hg_archival.txt
412 Only in ../wdir: .hg_archival.txt
408
413
409 $ find ../wdir -type f | sort
414 $ find ../wdir -type f | sort
410 ../wdir/.hg_archival.txt
415 ../wdir/.hg_archival.txt
411 ../wdir/.hgsub
416 ../wdir/.hgsub
412 ../wdir/.hgsubstate
417 ../wdir/.hgsubstate
413 ../wdir/foo/bar/abc
418 ../wdir/foo/bar/abc
414 ../wdir/sub1/.hgsub
419 ../wdir/sub1/.hgsub
415 ../wdir/sub1/.hgsubstate
420 ../wdir/sub1/.hgsubstate
416 ../wdir/sub1/foo
421 ../wdir/sub1/foo
417 ../wdir/sub1/sub1
422 ../wdir/sub1/sub1
418 ../wdir/sub1/sub2/folder/test.txt
423 ../wdir/sub1/sub2/folder/test.txt
419 ../wdir/sub1/sub2/sub2
424 ../wdir/sub1/sub2/sub2
420
425
421 $ cat ../wdir/.hg_archival.txt
426 $ cat ../wdir/.hg_archival.txt
422 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
427 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
423 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
428 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
424 branch: default
429 branch: default
425 latesttag: null
430 latesttag: null
426 latesttagdistance: 4
431 latesttagdistance: 4
427 changessincelatesttag: 4
432 changessincelatesttag: 4
428
433
429 Attempting to archive 'wdir()' with a missing file is handled gracefully
434 Attempting to archive 'wdir()' with a missing file is handled gracefully
430 $ rm sub1/sub1
435 $ rm sub1/sub1
431 $ rm -r ../wdir
436 $ rm -r ../wdir
432 $ hg archive -v -S -r 'wdir()' ../wdir
437 $ hg archive -v -S -r 'wdir()' ../wdir
433 \r (no-eol) (esc)
438 \r (no-eol) (esc)
434 archiving [ ] 0/3\r (no-eol) (esc)
439 archiving [ ] 0/3\r (no-eol) (esc)
435 archiving [=============> ] 1/3\r (no-eol) (esc)
440 archiving [=============> ] 1/3\r (no-eol) (esc)
436 archiving [===========================> ] 2/3\r (no-eol) (esc)
441 archiving [===========================> ] 2/3\r (no-eol) (esc)
437 archiving [==========================================>] 3/3\r (no-eol) (esc)
442 archiving [==========================================>] 3/3\r (no-eol) (esc)
438 \r (no-eol) (esc)
443 \r (no-eol) (esc)
439 \r (no-eol) (esc)
444 \r (no-eol) (esc)
440 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
445 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
441 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
446 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
442 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
447 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
443 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
448 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
444 \r (no-eol) (esc)
449 \r (no-eol) (esc)
445 \r (no-eol) (esc)
450 \r (no-eol) (esc)
446 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
451 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
447 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
452 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
448 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
453 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
449 \r (no-eol) (esc)
454 \r (no-eol) (esc)
450 $ find ../wdir -type f | sort
455 $ find ../wdir -type f | sort
451 ../wdir/.hg_archival.txt
456 ../wdir/.hg_archival.txt
452 ../wdir/.hgsub
457 ../wdir/.hgsub
453 ../wdir/.hgsubstate
458 ../wdir/.hgsubstate
454 ../wdir/foo/bar/abc
459 ../wdir/foo/bar/abc
455 ../wdir/sub1/.hgsub
460 ../wdir/sub1/.hgsub
456 ../wdir/sub1/.hgsubstate
461 ../wdir/sub1/.hgsubstate
457 ../wdir/sub1/foo
462 ../wdir/sub1/foo
458 ../wdir/sub1/sub2/folder/test.txt
463 ../wdir/sub1/sub2/folder/test.txt
459 ../wdir/sub1/sub2/sub2
464 ../wdir/sub1/sub2/sub2
460
465
461 Continue relative path printing + subrepos
466 Continue relative path printing + subrepos
462 $ hg update -Cq
467 $ hg update -Cq
463 $ rm -r ../wdir
468 $ rm -r ../wdir
464 $ hg archive -S -r 'wdir()' ../wdir
469 $ hg archive -S -r 'wdir()' ../wdir
465 \r (no-eol) (esc)
470 \r (no-eol) (esc)
466 archiving [ ] 0/3\r (no-eol) (esc)
471 archiving [ ] 0/3\r (no-eol) (esc)
467 archiving [=============> ] 1/3\r (no-eol) (esc)
472 archiving [=============> ] 1/3\r (no-eol) (esc)
468 archiving [===========================> ] 2/3\r (no-eol) (esc)
473 archiving [===========================> ] 2/3\r (no-eol) (esc)
469 archiving [==========================================>] 3/3\r (no-eol) (esc)
474 archiving [==========================================>] 3/3\r (no-eol) (esc)
470 \r (no-eol) (esc)
475 \r (no-eol) (esc)
471 \r (no-eol) (esc)
476 \r (no-eol) (esc)
472 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
477 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
473 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
478 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
474 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
479 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
475 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
480 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
476 \r (no-eol) (esc)
481 \r (no-eol) (esc)
477 \r (no-eol) (esc)
482 \r (no-eol) (esc)
478 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
483 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
479 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
484 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
480 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
485 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
481 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
486 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
482 \r (no-eol) (esc)
487 \r (no-eol) (esc)
483 $ cat ../wdir/.hg_archival.txt
488 $ cat ../wdir/.hg_archival.txt
484 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
489 repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
485 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd
490 node: 9bb10eebee29dc0f1201dcf5977b811a540255fd
486 branch: default
491 branch: default
487 latesttag: null
492 latesttag: null
488 latesttagdistance: 4
493 latesttagdistance: 4
489 changessincelatesttag: 4
494 changessincelatesttag: 4
490
495
491 $ touch sub1/sub2/folder/bar
496 $ touch sub1/sub2/folder/bar
492 $ hg addremove sub1/sub2
497 $ hg addremove sub1/sub2
493 adding sub1/sub2/folder/bar
498 adding sub1/sub2/folder/bar
494 $ hg status -S
499 $ hg status -S
495 A sub1/sub2/folder/bar
500 A sub1/sub2/folder/bar
496 ? foo/bar/abc
501 ? foo/bar/abc
497 ? sub1/foo
502 ? sub1/foo
498 $ hg update -Cq
503 $ hg update -Cq
499 $ hg addremove sub1
504 $ hg addremove sub1
500 adding sub1/sub2/folder/bar
505 adding sub1/sub2/folder/bar
501 adding sub1/foo
506 adding sub1/foo
502 $ hg update -Cq
507 $ hg update -Cq
503 $ rm sub1/sub2/folder/test.txt
508 $ rm sub1/sub2/folder/test.txt
504 $ rm sub1/sub2/test.txt
509 $ rm sub1/sub2/test.txt
505 $ hg ci -ASm "remove test.txt"
510 $ hg ci -ASm "remove test.txt"
506 adding sub1/sub2/folder/bar
511 adding sub1/sub2/folder/bar
507 removing sub1/sub2/folder/test.txt
512 removing sub1/sub2/folder/test.txt
508 removing sub1/sub2/test.txt
513 removing sub1/sub2/test.txt
509 adding sub1/foo
514 adding sub1/foo
510 adding foo/bar/abc
515 adding foo/bar/abc
511 committing subrepository sub1
516 committing subrepository sub1
512 committing subrepository sub1/sub2
517 committing subrepository sub1/sub2
513
518
514 $ hg forget sub1/sub2/sub2
519 $ hg forget sub1/sub2/sub2
515 $ echo x > sub1/sub2/x.txt
520 $ echo x > sub1/sub2/x.txt
516 $ hg add sub1/sub2/x.txt
521 $ hg add sub1/sub2/x.txt
517
522
518 Files sees uncommitted adds and removes in subrepos
523 Files sees uncommitted adds and removes in subrepos
519 $ hg files -S
524 $ hg files -S
520 .hgsub
525 .hgsub
521 .hgsubstate
526 .hgsubstate
522 foo/bar/abc
527 foo/bar/abc
523 main
528 main
524 sub1/.hgsub
529 sub1/.hgsub
525 sub1/.hgsubstate
530 sub1/.hgsubstate
526 sub1/foo
531 sub1/foo
527 sub1/sub1
532 sub1/sub1
528 sub1/sub2/folder/bar
533 sub1/sub2/folder/bar
529 sub1/sub2/x.txt
534 sub1/sub2/x.txt
530
535
531 $ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')"
536 $ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')"
532 .hgsub
537 .hgsub
533 .hgsubstate
538 .hgsubstate
534 foo/bar/abc
539 foo/bar/abc
535 main
540 main
536 sub1/.hgsub
541 sub1/.hgsub
537 sub1/.hgsubstate
542 sub1/.hgsubstate
538 sub1/foo
543 sub1/foo
539 sub1/sub1
544 sub1/sub1
540 sub1/sub2/folder/bar
545 sub1/sub2/folder/bar
541 sub1/sub2/x.txt
546 sub1/sub2/x.txt
542
547
543 $ hg files -r '.^' -S "set:eol('dos') or eol('unix')"
548 $ hg files -r '.^' -S "set:eol('dos') or eol('unix')"
544 .hgsub
549 .hgsub
545 .hgsubstate
550 .hgsubstate
546 main
551 main
547 sub1/.hgsub
552 sub1/.hgsub
548 sub1/.hgsubstate
553 sub1/.hgsubstate
549 sub1/sub1
554 sub1/sub1
550 sub1/sub2/folder/test.txt
555 sub1/sub2/folder/test.txt
551 sub1/sub2/sub2
556 sub1/sub2/sub2
552 sub1/sub2/test.txt
557 sub1/sub2/test.txt
553
558
554 $ hg files sub1
559 $ hg files sub1
555 sub1/.hgsub
560 sub1/.hgsub
556 sub1/.hgsubstate
561 sub1/.hgsubstate
557 sub1/foo
562 sub1/foo
558 sub1/sub1
563 sub1/sub1
559 sub1/sub2/folder/bar
564 sub1/sub2/folder/bar
560 sub1/sub2/x.txt
565 sub1/sub2/x.txt
561
566
562 $ hg files sub1/sub2
567 $ hg files sub1/sub2
563 sub1/sub2/folder/bar
568 sub1/sub2/folder/bar
564 sub1/sub2/x.txt
569 sub1/sub2/x.txt
565
570
566 $ hg files
571 $ hg files
567 .hgsub
572 .hgsub
568 .hgsubstate
573 .hgsubstate
569 foo/bar/abc
574 foo/bar/abc
570 main
575 main
571
576
572 $ hg files -S -r '.^' sub1/sub2/folder
577 $ hg files -S -r '.^' sub1/sub2/folder
573 sub1/sub2/folder/test.txt
578 sub1/sub2/folder/test.txt
574
579
575 $ hg files -S -r '.^' sub1/sub2/missing
580 $ hg files -S -r '.^' sub1/sub2/missing
576 sub1/sub2/missing: no such file in rev 78026e779ea6
581 sub1/sub2/missing: no such file in rev 78026e779ea6
577 [1]
582 [1]
578
583
579 $ hg files -r '.^' sub1/
584 $ hg files -r '.^' sub1/
580 sub1/.hgsub
585 sub1/.hgsub
581 sub1/.hgsubstate
586 sub1/.hgsubstate
582 sub1/sub1
587 sub1/sub1
583 sub1/sub2/folder/test.txt
588 sub1/sub2/folder/test.txt
584 sub1/sub2/sub2
589 sub1/sub2/sub2
585 sub1/sub2/test.txt
590 sub1/sub2/test.txt
586
591
587 $ hg files -r '.^' sub1/sub2
592 $ hg files -r '.^' sub1/sub2
588 sub1/sub2/folder/test.txt
593 sub1/sub2/folder/test.txt
589 sub1/sub2/sub2
594 sub1/sub2/sub2
590 sub1/sub2/test.txt
595 sub1/sub2/test.txt
591
596
592 $ hg rollback -q
597 $ hg rollback -q
593 $ hg up -Cq
598 $ hg up -Cq
594
599
595 $ hg --config extensions.largefiles=! archive -S ../archive_all
600 $ hg --config extensions.largefiles=! archive -S ../archive_all
596 \r (no-eol) (esc)
601 \r (no-eol) (esc)
597 archiving [ ] 0/3\r (no-eol) (esc)
602 archiving [ ] 0/3\r (no-eol) (esc)
598 archiving [=============> ] 1/3\r (no-eol) (esc)
603 archiving [=============> ] 1/3\r (no-eol) (esc)
599 archiving [===========================> ] 2/3\r (no-eol) (esc)
604 archiving [===========================> ] 2/3\r (no-eol) (esc)
600 archiving [==========================================>] 3/3\r (no-eol) (esc)
605 archiving [==========================================>] 3/3\r (no-eol) (esc)
601 \r (no-eol) (esc)
606 \r (no-eol) (esc)
602 \r (no-eol) (esc)
607 \r (no-eol) (esc)
603 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
608 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
604 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
609 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
605 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
610 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
606 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
611 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
607 \r (no-eol) (esc)
612 \r (no-eol) (esc)
608 \r (no-eol) (esc)
613 \r (no-eol) (esc)
609 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
614 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
610 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
615 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
611 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
616 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
612 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
617 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
613 \r (no-eol) (esc)
618 \r (no-eol) (esc)
614 $ find ../archive_all | sort
619 $ find ../archive_all | sort
615 ../archive_all
620 ../archive_all
616 ../archive_all/.hg_archival.txt
621 ../archive_all/.hg_archival.txt
617 ../archive_all/.hgsub
622 ../archive_all/.hgsub
618 ../archive_all/.hgsubstate
623 ../archive_all/.hgsubstate
619 ../archive_all/main
624 ../archive_all/main
620 ../archive_all/sub1
625 ../archive_all/sub1
621 ../archive_all/sub1/.hgsub
626 ../archive_all/sub1/.hgsub
622 ../archive_all/sub1/.hgsubstate
627 ../archive_all/sub1/.hgsubstate
623 ../archive_all/sub1/sub1
628 ../archive_all/sub1/sub1
624 ../archive_all/sub1/sub2
629 ../archive_all/sub1/sub2
625 ../archive_all/sub1/sub2/folder
630 ../archive_all/sub1/sub2/folder
626 ../archive_all/sub1/sub2/folder/test.txt
631 ../archive_all/sub1/sub2/folder/test.txt
627 ../archive_all/sub1/sub2/sub2
632 ../archive_all/sub1/sub2/sub2
628 ../archive_all/sub1/sub2/test.txt
633 ../archive_all/sub1/sub2/test.txt
629
634
630 Check that archive -X works in deep subrepos
635 Check that archive -X works in deep subrepos
631
636
632 $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude
637 $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude
633 \r (no-eol) (esc)
638 \r (no-eol) (esc)
634 archiving [ ] 0/3\r (no-eol) (esc)
639 archiving [ ] 0/3\r (no-eol) (esc)
635 archiving [=============> ] 1/3\r (no-eol) (esc)
640 archiving [=============> ] 1/3\r (no-eol) (esc)
636 archiving [===========================> ] 2/3\r (no-eol) (esc)
641 archiving [===========================> ] 2/3\r (no-eol) (esc)
637 archiving [==========================================>] 3/3\r (no-eol) (esc)
642 archiving [==========================================>] 3/3\r (no-eol) (esc)
638 \r (no-eol) (esc)
643 \r (no-eol) (esc)
639 \r (no-eol) (esc)
644 \r (no-eol) (esc)
640 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
645 archiving (sub1) [ ] 0/3\r (no-eol) (esc)
641 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
646 archiving (sub1) [===========> ] 1/3\r (no-eol) (esc)
642 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
647 archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc)
643 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
648 archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
644 \r (no-eol) (esc)
649 \r (no-eol) (esc)
645 \r (no-eol) (esc)
650 \r (no-eol) (esc)
646 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
651 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
647 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
652 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
648 \r (no-eol) (esc)
653 \r (no-eol) (esc)
649 $ find ../archive_exclude | sort
654 $ find ../archive_exclude | sort
650 ../archive_exclude
655 ../archive_exclude
651 ../archive_exclude/.hg_archival.txt
656 ../archive_exclude/.hg_archival.txt
652 ../archive_exclude/.hgsub
657 ../archive_exclude/.hgsub
653 ../archive_exclude/.hgsubstate
658 ../archive_exclude/.hgsubstate
654 ../archive_exclude/main
659 ../archive_exclude/main
655 ../archive_exclude/sub1
660 ../archive_exclude/sub1
656 ../archive_exclude/sub1/.hgsub
661 ../archive_exclude/sub1/.hgsub
657 ../archive_exclude/sub1/.hgsubstate
662 ../archive_exclude/sub1/.hgsubstate
658 ../archive_exclude/sub1/sub1
663 ../archive_exclude/sub1/sub1
659 ../archive_exclude/sub1/sub2
664 ../archive_exclude/sub1/sub2
660 ../archive_exclude/sub1/sub2/sub2
665 ../archive_exclude/sub1/sub2/sub2
661
666
662 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include
667 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include
663 \r (no-eol) (esc)
668 \r (no-eol) (esc)
664 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
669 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
665 \r (no-eol) (esc)
670 \r (no-eol) (esc)
666 \r (no-eol) (esc)
671 \r (no-eol) (esc)
667 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
672 archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc)
668 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
673 archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc)
669 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
674 archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
670 \r (no-eol) (esc)
675 \r (no-eol) (esc)
671 $ find ../archive_include | sort
676 $ find ../archive_include | sort
672 ../archive_include
677 ../archive_include
673 ../archive_include/sub1
678 ../archive_include/sub1
674 ../archive_include/sub1/sub2
679 ../archive_include/sub1/sub2
675 ../archive_include/sub1/sub2/folder
680 ../archive_include/sub1/sub2/folder
676 ../archive_include/sub1/sub2/folder/test.txt
681 ../archive_include/sub1/sub2/folder/test.txt
677 ../archive_include/sub1/sub2/test.txt
682 ../archive_include/sub1/sub2/test.txt
678
683
679 Check that deep archive works with largefiles (which overrides hgsubrepo impl)
684 Check that deep archive works with largefiles (which overrides hgsubrepo impl)
680 This also tests the repo.ui regression in 43fb170a23bd, and that lf subrepo
685 This also tests the repo.ui regression in 43fb170a23bd, and that lf subrepo
681 subrepos are archived properly.
686 subrepos are archived properly.
682 Note that add --large through a subrepo currently adds the file as a normal file
687 Note that add --large through a subrepo currently adds the file as a normal file
683
688
684 $ echo "large" > sub1/sub2/large.bin
689 $ echo "large" > sub1/sub2/large.bin
685 $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin
690 $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin
686 $ echo "large" > large.bin
691 $ echo "large" > large.bin
687 $ hg --config extensions.largefiles= add --large large.bin
692 $ hg --config extensions.largefiles= add --large large.bin
688 $ hg --config extensions.largefiles= ci -S -m "add large files"
693 $ hg --config extensions.largefiles= ci -S -m "add large files"
689 committing subrepository sub1
694 committing subrepository sub1
690 committing subrepository sub1/sub2
695 committing subrepository sub1/sub2
691
696
692 $ hg --config extensions.largefiles= archive -S ../archive_lf
697 $ hg --config extensions.largefiles= archive -S ../archive_lf
693 $ find ../archive_lf | sort
698 $ find ../archive_lf | sort
694 ../archive_lf
699 ../archive_lf
695 ../archive_lf/.hg_archival.txt
700 ../archive_lf/.hg_archival.txt
696 ../archive_lf/.hgsub
701 ../archive_lf/.hgsub
697 ../archive_lf/.hgsubstate
702 ../archive_lf/.hgsubstate
698 ../archive_lf/large.bin
703 ../archive_lf/large.bin
699 ../archive_lf/main
704 ../archive_lf/main
700 ../archive_lf/sub1
705 ../archive_lf/sub1
701 ../archive_lf/sub1/.hgsub
706 ../archive_lf/sub1/.hgsub
702 ../archive_lf/sub1/.hgsubstate
707 ../archive_lf/sub1/.hgsubstate
703 ../archive_lf/sub1/sub1
708 ../archive_lf/sub1/sub1
704 ../archive_lf/sub1/sub2
709 ../archive_lf/sub1/sub2
705 ../archive_lf/sub1/sub2/folder
710 ../archive_lf/sub1/sub2/folder
706 ../archive_lf/sub1/sub2/folder/test.txt
711 ../archive_lf/sub1/sub2/folder/test.txt
707 ../archive_lf/sub1/sub2/large.bin
712 ../archive_lf/sub1/sub2/large.bin
708 ../archive_lf/sub1/sub2/sub2
713 ../archive_lf/sub1/sub2/sub2
709 ../archive_lf/sub1/sub2/test.txt
714 ../archive_lf/sub1/sub2/test.txt
710 $ rm -rf ../archive_lf
715 $ rm -rf ../archive_lf
711
716
712 Exclude large files from main and sub-sub repo
717 Exclude large files from main and sub-sub repo
713
718
714 $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf
719 $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf
715 $ find ../archive_lf | sort
720 $ find ../archive_lf | sort
716 ../archive_lf
721 ../archive_lf
717 ../archive_lf/.hg_archival.txt
722 ../archive_lf/.hg_archival.txt
718 ../archive_lf/.hgsub
723 ../archive_lf/.hgsub
719 ../archive_lf/.hgsubstate
724 ../archive_lf/.hgsubstate
720 ../archive_lf/main
725 ../archive_lf/main
721 ../archive_lf/sub1
726 ../archive_lf/sub1
722 ../archive_lf/sub1/.hgsub
727 ../archive_lf/sub1/.hgsub
723 ../archive_lf/sub1/.hgsubstate
728 ../archive_lf/sub1/.hgsubstate
724 ../archive_lf/sub1/sub1
729 ../archive_lf/sub1/sub1
725 ../archive_lf/sub1/sub2
730 ../archive_lf/sub1/sub2
726 ../archive_lf/sub1/sub2/folder
731 ../archive_lf/sub1/sub2/folder
727 ../archive_lf/sub1/sub2/folder/test.txt
732 ../archive_lf/sub1/sub2/folder/test.txt
728 ../archive_lf/sub1/sub2/sub2
733 ../archive_lf/sub1/sub2/sub2
729 ../archive_lf/sub1/sub2/test.txt
734 ../archive_lf/sub1/sub2/test.txt
730 $ rm -rf ../archive_lf
735 $ rm -rf ../archive_lf
731
736
732 Exclude normal files from main and sub-sub repo
737 Exclude normal files from main and sub-sub repo
733
738
734 $ hg --config extensions.largefiles= archive -S -X '**.txt' -p '.' ../archive_lf.tgz
739 $ hg --config extensions.largefiles= archive -S -X '**.txt' -p '.' ../archive_lf.tgz
735 $ tar -tzf ../archive_lf.tgz | sort
740 $ tar -tzf ../archive_lf.tgz | sort
736 .hgsub
741 .hgsub
737 .hgsubstate
742 .hgsubstate
738 large.bin
743 large.bin
739 main
744 main
740 sub1/.hgsub
745 sub1/.hgsub
741 sub1/.hgsubstate
746 sub1/.hgsubstate
742 sub1/sub1
747 sub1/sub1
743 sub1/sub2/large.bin
748 sub1/sub2/large.bin
744 sub1/sub2/sub2
749 sub1/sub2/sub2
745
750
746 Include normal files from within a largefiles subrepo
751 Include normal files from within a largefiles subrepo
747
752
748 $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf
753 $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf
749 $ find ../archive_lf | sort
754 $ find ../archive_lf | sort
750 ../archive_lf
755 ../archive_lf
751 ../archive_lf/.hg_archival.txt
756 ../archive_lf/.hg_archival.txt
752 ../archive_lf/sub1
757 ../archive_lf/sub1
753 ../archive_lf/sub1/sub2
758 ../archive_lf/sub1/sub2
754 ../archive_lf/sub1/sub2/folder
759 ../archive_lf/sub1/sub2/folder
755 ../archive_lf/sub1/sub2/folder/test.txt
760 ../archive_lf/sub1/sub2/folder/test.txt
756 ../archive_lf/sub1/sub2/test.txt
761 ../archive_lf/sub1/sub2/test.txt
757 $ rm -rf ../archive_lf
762 $ rm -rf ../archive_lf
758
763
759 Include large files from within a largefiles subrepo
764 Include large files from within a largefiles subrepo
760
765
761 $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf
766 $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf
762 $ find ../archive_lf | sort
767 $ find ../archive_lf | sort
763 ../archive_lf
768 ../archive_lf
764 ../archive_lf/large.bin
769 ../archive_lf/large.bin
765 ../archive_lf/sub1
770 ../archive_lf/sub1
766 ../archive_lf/sub1/sub2
771 ../archive_lf/sub1/sub2
767 ../archive_lf/sub1/sub2/large.bin
772 ../archive_lf/sub1/sub2/large.bin
768 $ rm -rf ../archive_lf
773 $ rm -rf ../archive_lf
769
774
770 Find an exact largefile match in a largefiles subrepo
775 Find an exact largefile match in a largefiles subrepo
771
776
772 $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf
777 $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf
773 $ find ../archive_lf | sort
778 $ find ../archive_lf | sort
774 ../archive_lf
779 ../archive_lf
775 ../archive_lf/sub1
780 ../archive_lf/sub1
776 ../archive_lf/sub1/sub2
781 ../archive_lf/sub1/sub2
777 ../archive_lf/sub1/sub2/large.bin
782 ../archive_lf/sub1/sub2/large.bin
778 $ rm -rf ../archive_lf
783 $ rm -rf ../archive_lf
779
784
780 The local repo enables largefiles if a largefiles repo is cloned
785 The local repo enables largefiles if a largefiles repo is cloned
781
786
782 $ hg showconfig extensions
787 $ hg showconfig extensions
783 extensions.largefiles=
788 extensions.largefiles=
784
789
785 $ hg --config extensions.largefiles= clone -qU . ../lfclone
790 $ hg --config extensions.largefiles= clone -qU . ../lfclone
786 $ grep largefiles ../lfclone/.hg/requires
791 $ grep largefiles ../lfclone/.hg/requires
787 largefiles
792 largefiles
788
793
789 Find an exact match to a standin (should archive nothing)
794 Find an exact match to a standin (should archive nothing)
790 $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
795 $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
791 $ find ../archive_lf 2> /dev/null | sort
796 $ find ../archive_lf 2> /dev/null | sort
792
797
793 $ cat >> $HGRCPATH <<EOF
798 $ cat >> $HGRCPATH <<EOF
794 > [extensions]
799 > [extensions]
795 > largefiles=
800 > largefiles=
796 > [largefiles]
801 > [largefiles]
797 > patterns=glob:**.dat
802 > patterns=glob:**.dat
798 > EOF
803 > EOF
799
804
800 Test forget through a deep subrepo with the largefiles extension, both a
805 Test forget through a deep subrepo with the largefiles extension, both a
801 largefile and a normal file. Then a largefile that hasn't been committed yet.
806 largefile and a normal file. Then a largefile that hasn't been committed yet.
802 $ touch sub1/sub2/untracked.txt
807 $ touch sub1/sub2/untracked.txt
803 $ touch sub1/sub2/large.dat
808 $ touch sub1/sub2/large.dat
804 $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
809 $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
805 not removing sub1/sub2/untracked.txt: file is already untracked
810 not removing sub1/sub2/untracked.txt: file is already untracked
806 [1]
811 [1]
807 $ hg add --large --dry-run -v sub1/sub2/untracked.txt
812 $ hg add --large --dry-run -v sub1/sub2/untracked.txt
808 adding sub1/sub2/untracked.txt as a largefile
813 adding sub1/sub2/untracked.txt as a largefile
809 $ hg add --large -v sub1/sub2/untracked.txt
814 $ hg add --large -v sub1/sub2/untracked.txt
810 adding sub1/sub2/untracked.txt as a largefile
815 adding sub1/sub2/untracked.txt as a largefile
811 $ hg add --normal -v sub1/sub2/large.dat
816 $ hg add --normal -v sub1/sub2/large.dat
812 adding sub1/sub2/large.dat
817 adding sub1/sub2/large.dat
813 $ hg forget -v sub1/sub2/untracked.txt
818 $ hg forget -v sub1/sub2/untracked.txt
814 removing sub1/sub2/untracked.txt
819 removing sub1/sub2/untracked.txt
815 $ hg status -S
820 $ hg status -S
816 A sub1/sub2/large.dat
821 A sub1/sub2/large.dat
817 R sub1/sub2/large.bin
822 R sub1/sub2/large.bin
818 R sub1/sub2/test.txt
823 R sub1/sub2/test.txt
819 ? foo/bar/abc
824 ? foo/bar/abc
820 ? sub1/sub2/untracked.txt
825 ? sub1/sub2/untracked.txt
821 ? sub1/sub2/x.txt
826 ? sub1/sub2/x.txt
822 $ hg add sub1/sub2
827 $ hg add sub1/sub2
823
828
824 $ hg archive -S -r 'wdir()' ../wdir2
829 $ hg archive -S -r 'wdir()' ../wdir2
825 $ diff -r . ../wdir2 | egrep -v '\.hg$|^Common subdirectories:'
830 $ diff -r . ../wdir2 | egrep -v '\.hg$|^Common subdirectories:'
826 Only in ../wdir2: .hg_archival.txt
831 Only in ../wdir2: .hg_archival.txt
827 Only in .: .hglf
832 Only in .: .hglf
828 Only in .: foo
833 Only in .: foo
829 Only in ./sub1/sub2: large.bin
834 Only in ./sub1/sub2: large.bin
830 Only in ./sub1/sub2: test.txt
835 Only in ./sub1/sub2: test.txt
831 Only in ./sub1/sub2: untracked.txt
836 Only in ./sub1/sub2: untracked.txt
832 Only in ./sub1/sub2: x.txt
837 Only in ./sub1/sub2: x.txt
833 $ find ../wdir2 -type f | sort
838 $ find ../wdir2 -type f | sort
834 ../wdir2/.hg_archival.txt
839 ../wdir2/.hg_archival.txt
835 ../wdir2/.hgsub
840 ../wdir2/.hgsub
836 ../wdir2/.hgsubstate
841 ../wdir2/.hgsubstate
837 ../wdir2/large.bin
842 ../wdir2/large.bin
838 ../wdir2/main
843 ../wdir2/main
839 ../wdir2/sub1/.hgsub
844 ../wdir2/sub1/.hgsub
840 ../wdir2/sub1/.hgsubstate
845 ../wdir2/sub1/.hgsubstate
841 ../wdir2/sub1/sub1
846 ../wdir2/sub1/sub1
842 ../wdir2/sub1/sub2/folder/test.txt
847 ../wdir2/sub1/sub2/folder/test.txt
843 ../wdir2/sub1/sub2/large.dat
848 ../wdir2/sub1/sub2/large.dat
844 ../wdir2/sub1/sub2/sub2
849 ../wdir2/sub1/sub2/sub2
845 $ hg status -S -mac -n | sort
850 $ hg status -S -mac -n | sort
846 .hgsub
851 .hgsub
847 .hgsubstate
852 .hgsubstate
848 large.bin
853 large.bin
849 main
854 main
850 sub1/.hgsub
855 sub1/.hgsub
851 sub1/.hgsubstate
856 sub1/.hgsubstate
852 sub1/sub1
857 sub1/sub1
853 sub1/sub2/folder/test.txt
858 sub1/sub2/folder/test.txt
854 sub1/sub2/large.dat
859 sub1/sub2/large.dat
855 sub1/sub2/sub2
860 sub1/sub2/sub2
856
861
857 $ hg ci -Sqm 'forget testing'
862 $ hg ci -Sqm 'forget testing'
858
863
859 Test 'wdir()' modified file archiving with largefiles
864 Test 'wdir()' modified file archiving with largefiles
860 $ echo 'mod' > main
865 $ echo 'mod' > main
861 $ echo 'mod' > large.bin
866 $ echo 'mod' > large.bin
862 $ echo 'mod' > sub1/sub2/large.dat
867 $ echo 'mod' > sub1/sub2/large.dat
863 $ hg archive -S -r 'wdir()' ../wdir3
868 $ hg archive -S -r 'wdir()' ../wdir3
864 $ diff -r . ../wdir3 | egrep -v '\.hg$|^Common subdirectories'
869 $ diff -r . ../wdir3 | egrep -v '\.hg$|^Common subdirectories'
865 Only in ../wdir3: .hg_archival.txt
870 Only in ../wdir3: .hg_archival.txt
866 Only in .: .hglf
871 Only in .: .hglf
867 Only in .: foo
872 Only in .: foo
868 Only in ./sub1/sub2: large.bin
873 Only in ./sub1/sub2: large.bin
869 Only in ./sub1/sub2: test.txt
874 Only in ./sub1/sub2: test.txt
870 Only in ./sub1/sub2: untracked.txt
875 Only in ./sub1/sub2: untracked.txt
871 Only in ./sub1/sub2: x.txt
876 Only in ./sub1/sub2: x.txt
872 $ find ../wdir3 -type f | sort
877 $ find ../wdir3 -type f | sort
873 ../wdir3/.hg_archival.txt
878 ../wdir3/.hg_archival.txt
874 ../wdir3/.hgsub
879 ../wdir3/.hgsub
875 ../wdir3/.hgsubstate
880 ../wdir3/.hgsubstate
876 ../wdir3/large.bin
881 ../wdir3/large.bin
877 ../wdir3/main
882 ../wdir3/main
878 ../wdir3/sub1/.hgsub
883 ../wdir3/sub1/.hgsub
879 ../wdir3/sub1/.hgsubstate
884 ../wdir3/sub1/.hgsubstate
880 ../wdir3/sub1/sub1
885 ../wdir3/sub1/sub1
881 ../wdir3/sub1/sub2/folder/test.txt
886 ../wdir3/sub1/sub2/folder/test.txt
882 ../wdir3/sub1/sub2/large.dat
887 ../wdir3/sub1/sub2/large.dat
883 ../wdir3/sub1/sub2/sub2
888 ../wdir3/sub1/sub2/sub2
884 $ hg up -Cq
889 $ hg up -Cq
885
890
886 Test issue4330: commit a directory where only normal files have changed
891 Test issue4330: commit a directory where only normal files have changed
887 $ touch foo/bar/large.dat
892 $ touch foo/bar/large.dat
888 $ hg add --large foo/bar/large.dat
893 $ hg add --large foo/bar/large.dat
889 $ hg ci -m 'add foo/bar/large.dat'
894 $ hg ci -m 'add foo/bar/large.dat'
890 $ touch a.txt
895 $ touch a.txt
891 $ touch a.dat
896 $ touch a.dat
892 $ hg add -v foo/bar/abc a.txt a.dat
897 $ hg add -v foo/bar/abc a.txt a.dat
893 adding a.dat as a largefile
898 adding a.dat as a largefile
894 adding a.txt
899 adding a.txt
895 adding foo/bar/abc
900 adding foo/bar/abc
896 $ hg ci -m 'dir commit with only normal file deltas' foo/bar
901 $ hg ci -m 'dir commit with only normal file deltas' foo/bar
897 $ hg status
902 $ hg status
898 A a.dat
903 A a.dat
899 A a.txt
904 A a.txt
900
905
901 Test a directory commit with a changed largefile and a changed normal file
906 Test a directory commit with a changed largefile and a changed normal file
902 $ echo changed > foo/bar/large.dat
907 $ echo changed > foo/bar/large.dat
903 $ echo changed > foo/bar/abc
908 $ echo changed > foo/bar/abc
904 $ hg ci -m 'dir commit with normal and lf file deltas' foo
909 $ hg ci -m 'dir commit with normal and lf file deltas' foo
905 $ hg status
910 $ hg status
906 A a.dat
911 A a.dat
907 A a.txt
912 A a.txt
908
913
909 $ hg ci -m "add a.*"
914 $ hg ci -m "add a.*"
910 $ hg mv a.dat b.dat
915 $ hg mv a.dat b.dat
911 $ hg mv foo/bar/abc foo/bar/def
916 $ hg mv foo/bar/abc foo/bar/def
912 $ hg status -C
917 $ hg status -C
913 A b.dat
918 A b.dat
914 a.dat
919 a.dat
915 A foo/bar/def
920 A foo/bar/def
916 foo/bar/abc
921 foo/bar/abc
917 R a.dat
922 R a.dat
918 R foo/bar/abc
923 R foo/bar/abc
919
924
920 $ hg ci -m "move large and normal"
925 $ hg ci -m "move large and normal"
921 $ hg status -C --rev '.^' --rev .
926 $ hg status -C --rev '.^' --rev .
922 A b.dat
927 A b.dat
923 a.dat
928 a.dat
924 A foo/bar/def
929 A foo/bar/def
925 foo/bar/abc
930 foo/bar/abc
926 R a.dat
931 R a.dat
927 R foo/bar/abc
932 R foo/bar/abc
928
933
929
934
930 $ echo foo > main
935 $ echo foo > main
931 $ hg ci -m "mod parent only"
936 $ hg ci -m "mod parent only"
932 $ hg init sub3
937 $ hg init sub3
933 $ echo "sub3 = sub3" >> .hgsub
938 $ echo "sub3 = sub3" >> .hgsub
934 $ echo xyz > sub3/a.txt
939 $ echo xyz > sub3/a.txt
935 $ hg add sub3/a.txt
940 $ hg add sub3/a.txt
936 $ hg ci -Sm "add sub3"
941 $ hg ci -Sm "add sub3"
937 committing subrepository sub3
942 committing subrepository sub3
938 $ cat .hgsub | grep -v sub3 > .hgsub1
943 $ cat .hgsub | grep -v sub3 > .hgsub1
939 $ mv .hgsub1 .hgsub
944 $ mv .hgsub1 .hgsub
940 $ hg ci -m "remove sub3"
945 $ hg ci -m "remove sub3"
941
946
942 $ hg log -r "subrepo()" --style compact
947 $ hg log -r "subrepo()" --style compact
943 0 7f491f53a367 1970-01-01 00:00 +0000 test
948 0 7f491f53a367 1970-01-01 00:00 +0000 test
944 main import
949 main import
945
950
946 1 ffe6649062fe 1970-01-01 00:00 +0000 test
951 1 ffe6649062fe 1970-01-01 00:00 +0000 test
947 deep nested modif should trigger a commit
952 deep nested modif should trigger a commit
948
953
949 2 9bb10eebee29 1970-01-01 00:00 +0000 test
954 2 9bb10eebee29 1970-01-01 00:00 +0000 test
950 add test.txt
955 add test.txt
951
956
952 3 7c64f035294f 1970-01-01 00:00 +0000 test
957 3 7c64f035294f 1970-01-01 00:00 +0000 test
953 add large files
958 add large files
954
959
955 4 f734a59e2e35 1970-01-01 00:00 +0000 test
960 4 f734a59e2e35 1970-01-01 00:00 +0000 test
956 forget testing
961 forget testing
957
962
958 11 9685a22af5db 1970-01-01 00:00 +0000 test
963 11 9685a22af5db 1970-01-01 00:00 +0000 test
959 add sub3
964 add sub3
960
965
961 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
966 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
962 remove sub3
967 remove sub3
963
968
964 $ hg log -r "subrepo('sub3')" --style compact
969 $ hg log -r "subrepo('sub3')" --style compact
965 11 9685a22af5db 1970-01-01 00:00 +0000 test
970 11 9685a22af5db 1970-01-01 00:00 +0000 test
966 add sub3
971 add sub3
967
972
968 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
973 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
969 remove sub3
974 remove sub3
970
975
971 $ hg log -r "subrepo('bogus')" --style compact
976 $ hg log -r "subrepo('bogus')" --style compact
972
977
973
978
974 Test .hgsubstate in the R state
979 Test .hgsubstate in the R state
975
980
976 $ hg rm .hgsub .hgsubstate
981 $ hg rm .hgsub .hgsubstate
977 \r (no-eol) (esc)
982 \r (no-eol) (esc)
978 deleting [=====================> ] 1/2\r (no-eol) (esc)
983 deleting [=====================> ] 1/2\r (no-eol) (esc)
979 deleting [===========================================>] 2/2\r (no-eol) (esc)
984 deleting [===========================================>] 2/2\r (no-eol) (esc)
980 \r (no-eol) (esc)
985 \r (no-eol) (esc)
981 $ hg ci -m 'trash subrepo tracking'
986 $ hg ci -m 'trash subrepo tracking'
982
987
983 $ hg log -r "subrepo('re:sub\d+')" --style compact
988 $ hg log -r "subrepo('re:sub\d+')" --style compact
984 0 7f491f53a367 1970-01-01 00:00 +0000 test
989 0 7f491f53a367 1970-01-01 00:00 +0000 test
985 main import
990 main import
986
991
987 1 ffe6649062fe 1970-01-01 00:00 +0000 test
992 1 ffe6649062fe 1970-01-01 00:00 +0000 test
988 deep nested modif should trigger a commit
993 deep nested modif should trigger a commit
989
994
990 2 9bb10eebee29 1970-01-01 00:00 +0000 test
995 2 9bb10eebee29 1970-01-01 00:00 +0000 test
991 add test.txt
996 add test.txt
992
997
993 3 7c64f035294f 1970-01-01 00:00 +0000 test
998 3 7c64f035294f 1970-01-01 00:00 +0000 test
994 add large files
999 add large files
995
1000
996 4 f734a59e2e35 1970-01-01 00:00 +0000 test
1001 4 f734a59e2e35 1970-01-01 00:00 +0000 test
997 forget testing
1002 forget testing
998
1003
999 11 9685a22af5db 1970-01-01 00:00 +0000 test
1004 11 9685a22af5db 1970-01-01 00:00 +0000 test
1000 add sub3
1005 add sub3
1001
1006
1002 12 2e0485b475b9 1970-01-01 00:00 +0000 test
1007 12 2e0485b475b9 1970-01-01 00:00 +0000 test
1003 remove sub3
1008 remove sub3
1004
1009
1005 13[tip] a68b2c361653 1970-01-01 00:00 +0000 test
1010 13[tip] a68b2c361653 1970-01-01 00:00 +0000 test
1006 trash subrepo tracking
1011 trash subrepo tracking
1007
1012
1008
1013
1009 Restore the trashed subrepo tracking
1014 Restore the trashed subrepo tracking
1010
1015
1011 $ hg rollback -q
1016 $ hg rollback -q
1012 $ hg update -Cq .
1017 $ hg update -Cq .
1013
1018
1014 Interaction with extdiff, largefiles and subrepos
1019 Interaction with extdiff, largefiles and subrepos
1015
1020
1016 $ hg --config extensions.extdiff= pdiff -S
1021 $ hg --config extensions.extdiff= pdiff -S
1017
1022
1018 $ hg --config extensions.extdiff= pdiff -r '.^' -S
1023 $ hg --config extensions.extdiff= pdiff -r '.^' -S
1019 \r (no-eol) (esc)
1024 \r (no-eol) (esc)
1020 archiving [ ] 0/2\r (no-eol) (esc)
1025 archiving [ ] 0/2\r (no-eol) (esc)
1021 archiving [====================> ] 1/2\r (no-eol) (esc)
1026 archiving [====================> ] 1/2\r (no-eol) (esc)
1022 archiving [==========================================>] 2/2\r (no-eol) (esc)
1027 archiving [==========================================>] 2/2\r (no-eol) (esc)
1023 \r (no-eol) (esc)
1028 \r (no-eol) (esc)
1024 \r (no-eol) (esc)
1029 \r (no-eol) (esc)
1025 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1030 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1026 \r (no-eol) (esc)
1031 \r (no-eol) (esc)
1027 \r (no-eol) (esc)
1032 \r (no-eol) (esc)
1028 archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
1033 archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
1029 \r (no-eol) (esc)
1034 \r (no-eol) (esc)
1030 \r (no-eol) (esc)
1035 \r (no-eol) (esc)
1031 archiving (sub3) [ <=> ] 0\r (no-eol) (esc)
1036 archiving (sub3) [ <=> ] 0\r (no-eol) (esc)
1032 \r (no-eol) (esc)
1037 \r (no-eol) (esc)
1033 \r (no-eol) (esc)
1038 \r (no-eol) (esc)
1034 archiving [ ] 0/2\r (no-eol) (esc)
1039 archiving [ ] 0/2\r (no-eol) (esc)
1035 archiving [====================> ] 1/2\r (no-eol) (esc)
1040 archiving [====================> ] 1/2\r (no-eol) (esc)
1036 archiving [==========================================>] 2/2\r (no-eol) (esc)
1041 archiving [==========================================>] 2/2\r (no-eol) (esc)
1037 \r (no-eol) (esc)
1042 \r (no-eol) (esc)
1038 \r (no-eol) (esc)
1043 \r (no-eol) (esc)
1039 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1044 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1040 \r (no-eol) (esc)
1045 \r (no-eol) (esc)
1041 \r (no-eol) (esc)
1046 \r (no-eol) (esc)
1042 archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
1047 archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc)
1043 \r (no-eol) (esc)
1048 \r (no-eol) (esc)
1044 diff -Nru cloned.*/.hgsub cloned/.hgsub (glob)
1049 diff -Nru cloned.*/.hgsub cloned/.hgsub (glob)
1045 --- cloned.*/.hgsub * (glob)
1050 --- cloned.*/.hgsub * (glob)
1046 +++ cloned/.hgsub * (glob)
1051 +++ cloned/.hgsub * (glob)
1047 @@ -1,2 +1* @@ (glob)
1052 @@ -1,2 +1* @@ (glob)
1048 sub1 = ../sub1
1053 sub1 = ../sub1
1049 -sub3 = sub3
1054 -sub3 = sub3
1050 diff -Nru cloned.*/.hgsubstate cloned/.hgsubstate (glob)
1055 diff -Nru cloned.*/.hgsubstate cloned/.hgsubstate (glob)
1051 --- cloned.*/.hgsubstate * (glob)
1056 --- cloned.*/.hgsubstate * (glob)
1052 +++ cloned/.hgsubstate * (glob)
1057 +++ cloned/.hgsubstate * (glob)
1053 @@ -1,2 +1* @@ (glob)
1058 @@ -1,2 +1* @@ (glob)
1054 7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1
1059 7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1
1055 -b1a26de6f2a045a9f079323693614ee322f1ff7e sub3
1060 -b1a26de6f2a045a9f079323693614ee322f1ff7e sub3
1056 [1]
1061 [1]
1057
1062
1058 $ hg --config extensions.extdiff= pdiff -r 0 -r '.^' -S
1063 $ hg --config extensions.extdiff= pdiff -r 0 -r '.^' -S
1059 \r (no-eol) (esc)
1064 \r (no-eol) (esc)
1060 archiving [ ] 0/3\r (no-eol) (esc)
1065 archiving [ ] 0/3\r (no-eol) (esc)
1061 archiving [=============> ] 1/3\r (no-eol) (esc)
1066 archiving [=============> ] 1/3\r (no-eol) (esc)
1062 archiving [===========================> ] 2/3\r (no-eol) (esc)
1067 archiving [===========================> ] 2/3\r (no-eol) (esc)
1063 archiving [==========================================>] 3/3\r (no-eol) (esc)
1068 archiving [==========================================>] 3/3\r (no-eol) (esc)
1064 \r (no-eol) (esc)
1069 \r (no-eol) (esc)
1065 \r (no-eol) (esc)
1070 \r (no-eol) (esc)
1066 archiving (sub1) [ ] 0/1\r (no-eol) (esc)
1071 archiving (sub1) [ ] 0/1\r (no-eol) (esc)
1067 archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
1072 archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
1068 \r (no-eol) (esc)
1073 \r (no-eol) (esc)
1069 \r (no-eol) (esc)
1074 \r (no-eol) (esc)
1070 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
1075 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
1071 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
1076 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
1072 \r (no-eol) (esc)
1077 \r (no-eol) (esc)
1073 \r (no-eol) (esc)
1078 \r (no-eol) (esc)
1074 archiving [ ] 0/8\r (no-eol) (esc)
1079 archiving [ ] 0/8\r (no-eol) (esc)
1075 archiving [====> ] 1/8\r (no-eol) (esc)
1080 archiving [====> ] 1/8\r (no-eol) (esc)
1076 archiving [=========> ] 2/8\r (no-eol) (esc)
1081 archiving [=========> ] 2/8\r (no-eol) (esc)
1077 archiving [===============> ] 3/8\r (no-eol) (esc)
1082 archiving [===============> ] 3/8\r (no-eol) (esc)
1078 archiving [====================> ] 4/8\r (no-eol) (esc)
1083 archiving [====================> ] 4/8\r (no-eol) (esc)
1079 archiving [=========================> ] 5/8\r (no-eol) (esc)
1084 archiving [=========================> ] 5/8\r (no-eol) (esc)
1080 archiving [===============================> ] 6/8\r (no-eol) (esc)
1085 archiving [===============================> ] 6/8\r (no-eol) (esc)
1081 archiving [====================================> ] 7/8\r (no-eol) (esc)
1086 archiving [====================================> ] 7/8\r (no-eol) (esc)
1082 archiving [==========================================>] 8/8\r (no-eol) (esc)
1087 archiving [==========================================>] 8/8\r (no-eol) (esc)
1083 \r (no-eol) (esc)
1088 \r (no-eol) (esc)
1084 \r (no-eol) (esc)
1089 \r (no-eol) (esc)
1085 archiving (sub1) [ ] 0/1\r (no-eol) (esc)
1090 archiving (sub1) [ ] 0/1\r (no-eol) (esc)
1086 archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
1091 archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
1087 \r (no-eol) (esc)
1092 \r (no-eol) (esc)
1088 \r (no-eol) (esc)
1093 \r (no-eol) (esc)
1089 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
1094 archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc)
1090 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
1095 archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc)
1091 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
1096 archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc)
1092 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
1097 archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
1093 \r (no-eol) (esc)
1098 \r (no-eol) (esc)
1094 \r (no-eol) (esc)
1099 \r (no-eol) (esc)
1095 archiving (sub3) [ ] 0/1\r (no-eol) (esc)
1100 archiving (sub3) [ ] 0/1\r (no-eol) (esc)
1096 archiving (sub3) [===================================>] 1/1\r (no-eol) (esc)
1101 archiving (sub3) [===================================>] 1/1\r (no-eol) (esc)
1097 \r (no-eol) (esc)
1102 \r (no-eol) (esc)
1098 diff -Nru cloned.*/.hglf/b.dat cloned.*/.hglf/b.dat (glob)
1103 diff -Nru cloned.*/.hglf/b.dat cloned.*/.hglf/b.dat (glob)
1099 --- cloned.*/.hglf/b.dat * (glob)
1104 --- cloned.*/.hglf/b.dat * (glob)
1100 +++ cloned.*/.hglf/b.dat * (glob)
1105 +++ cloned.*/.hglf/b.dat * (glob)
1101 @@ -*,0 +1* @@ (glob)
1106 @@ -*,0 +1* @@ (glob)
1102 +da39a3ee5e6b4b0d3255bfef95601890afd80709
1107 +da39a3ee5e6b4b0d3255bfef95601890afd80709
1103 diff -Nru cloned.*/.hglf/foo/bar/large.dat cloned.*/.hglf/foo/bar/large.dat (glob)
1108 diff -Nru cloned.*/.hglf/foo/bar/large.dat cloned.*/.hglf/foo/bar/large.dat (glob)
1104 --- cloned.*/.hglf/foo/bar/large.dat * (glob)
1109 --- cloned.*/.hglf/foo/bar/large.dat * (glob)
1105 +++ cloned.*/.hglf/foo/bar/large.dat * (glob)
1110 +++ cloned.*/.hglf/foo/bar/large.dat * (glob)
1106 @@ -*,0 +1* @@ (glob)
1111 @@ -*,0 +1* @@ (glob)
1107 +2f6933b5ee0f5fdd823d9717d8729f3c2523811b
1112 +2f6933b5ee0f5fdd823d9717d8729f3c2523811b
1108 diff -Nru cloned.*/.hglf/large.bin cloned.*/.hglf/large.bin (glob)
1113 diff -Nru cloned.*/.hglf/large.bin cloned.*/.hglf/large.bin (glob)
1109 --- cloned.*/.hglf/large.bin * (glob)
1114 --- cloned.*/.hglf/large.bin * (glob)
1110 +++ cloned.*/.hglf/large.bin * (glob)
1115 +++ cloned.*/.hglf/large.bin * (glob)
1111 @@ -*,0 +1* @@ (glob)
1116 @@ -*,0 +1* @@ (glob)
1112 +7f7097b041ccf68cc5561e9600da4655d21c6d18
1117 +7f7097b041ccf68cc5561e9600da4655d21c6d18
1113 diff -Nru cloned.*/.hgsub cloned.*/.hgsub (glob)
1118 diff -Nru cloned.*/.hgsub cloned.*/.hgsub (glob)
1114 --- cloned.*/.hgsub * (glob)
1119 --- cloned.*/.hgsub * (glob)
1115 +++ cloned.*/.hgsub * (glob)
1120 +++ cloned.*/.hgsub * (glob)
1116 @@ -1* +1,2 @@ (glob)
1121 @@ -1* +1,2 @@ (glob)
1117 sub1 = ../sub1
1122 sub1 = ../sub1
1118 +sub3 = sub3
1123 +sub3 = sub3
1119 diff -Nru cloned.*/.hgsubstate cloned.*/.hgsubstate (glob)
1124 diff -Nru cloned.*/.hgsubstate cloned.*/.hgsubstate (glob)
1120 --- cloned.*/.hgsubstate * (glob)
1125 --- cloned.*/.hgsubstate * (glob)
1121 +++ cloned.*/.hgsubstate * (glob)
1126 +++ cloned.*/.hgsubstate * (glob)
1122 @@ -1* +1,2 @@ (glob)
1127 @@ -1* +1,2 @@ (glob)
1123 -fc3b4ce2696f7741438c79207583768f2ce6b0dd sub1
1128 -fc3b4ce2696f7741438c79207583768f2ce6b0dd sub1
1124 +7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1
1129 +7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1
1125 +b1a26de6f2a045a9f079323693614ee322f1ff7e sub3
1130 +b1a26de6f2a045a9f079323693614ee322f1ff7e sub3
1126 diff -Nru cloned.*/foo/bar/def cloned.*/foo/bar/def (glob)
1131 diff -Nru cloned.*/foo/bar/def cloned.*/foo/bar/def (glob)
1127 --- cloned.*/foo/bar/def * (glob)
1132 --- cloned.*/foo/bar/def * (glob)
1128 +++ cloned.*/foo/bar/def * (glob)
1133 +++ cloned.*/foo/bar/def * (glob)
1129 @@ -*,0 +1* @@ (glob)
1134 @@ -*,0 +1* @@ (glob)
1130 +changed
1135 +changed
1131 diff -Nru cloned.*/main cloned.*/main (glob)
1136 diff -Nru cloned.*/main cloned.*/main (glob)
1132 --- cloned.*/main * (glob)
1137 --- cloned.*/main * (glob)
1133 +++ cloned.*/main * (glob)
1138 +++ cloned.*/main * (glob)
1134 @@ -1* +1* @@ (glob)
1139 @@ -1* +1* @@ (glob)
1135 -main
1140 -main
1136 +foo
1141 +foo
1137 diff -Nru cloned.*/sub1/.hgsubstate cloned.*/sub1/.hgsubstate (glob)
1142 diff -Nru cloned.*/sub1/.hgsubstate cloned.*/sub1/.hgsubstate (glob)
1138 --- cloned.*/sub1/.hgsubstate * (glob)
1143 --- cloned.*/sub1/.hgsubstate * (glob)
1139 +++ cloned.*/sub1/.hgsubstate * (glob)
1144 +++ cloned.*/sub1/.hgsubstate * (glob)
1140 @@ -1* +1* @@ (glob)
1145 @@ -1* +1* @@ (glob)
1141 -c57a0840e3badd667ef3c3ef65471609acb2ba3c sub2
1146 -c57a0840e3badd667ef3c3ef65471609acb2ba3c sub2
1142 +c77908c81ccea3794a896c79e98b0e004aee2e9e sub2
1147 +c77908c81ccea3794a896c79e98b0e004aee2e9e sub2
1143 diff -Nru cloned.*/sub1/sub2/folder/test.txt cloned.*/sub1/sub2/folder/test.txt (glob)
1148 diff -Nru cloned.*/sub1/sub2/folder/test.txt cloned.*/sub1/sub2/folder/test.txt (glob)
1144 --- cloned.*/sub1/sub2/folder/test.txt * (glob)
1149 --- cloned.*/sub1/sub2/folder/test.txt * (glob)
1145 +++ cloned.*/sub1/sub2/folder/test.txt * (glob)
1150 +++ cloned.*/sub1/sub2/folder/test.txt * (glob)
1146 @@ -*,0 +1* @@ (glob)
1151 @@ -*,0 +1* @@ (glob)
1147 +subfolder
1152 +subfolder
1148 diff -Nru cloned.*/sub1/sub2/sub2 cloned.*/sub1/sub2/sub2 (glob)
1153 diff -Nru cloned.*/sub1/sub2/sub2 cloned.*/sub1/sub2/sub2 (glob)
1149 --- cloned.*/sub1/sub2/sub2 * (glob)
1154 --- cloned.*/sub1/sub2/sub2 * (glob)
1150 +++ cloned.*/sub1/sub2/sub2 * (glob)
1155 +++ cloned.*/sub1/sub2/sub2 * (glob)
1151 @@ -1* +1* @@ (glob)
1156 @@ -1* +1* @@ (glob)
1152 -sub2
1157 -sub2
1153 +modified
1158 +modified
1154 diff -Nru cloned.*/sub3/a.txt cloned.*/sub3/a.txt (glob)
1159 diff -Nru cloned.*/sub3/a.txt cloned.*/sub3/a.txt (glob)
1155 --- cloned.*/sub3/a.txt * (glob)
1160 --- cloned.*/sub3/a.txt * (glob)
1156 +++ cloned.*/sub3/a.txt * (glob)
1161 +++ cloned.*/sub3/a.txt * (glob)
1157 @@ -*,0 +1* @@ (glob)
1162 @@ -*,0 +1* @@ (glob)
1158 +xyz
1163 +xyz
1159 [1]
1164 [1]
1160
1165
1161 $ echo mod > sub1/sub2/sub2
1166 $ echo mod > sub1/sub2/sub2
1162 $ hg --config extensions.extdiff= pdiff -S
1167 $ hg --config extensions.extdiff= pdiff -S
1163 \r (no-eol) (esc)
1168 \r (no-eol) (esc)
1164 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1169 archiving (sub1) [ <=> ] 0\r (no-eol) (esc)
1165 \r (no-eol) (esc)
1170 \r (no-eol) (esc)
1166 \r (no-eol) (esc)
1171 \r (no-eol) (esc)
1167 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
1172 archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc)
1168 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
1173 archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
1169 \r (no-eol) (esc)
1174 \r (no-eol) (esc)
1170 --- */cloned.*/sub1/sub2/sub2 * (glob)
1175 --- */cloned.*/sub1/sub2/sub2 * (glob)
1171 +++ */cloned/sub1/sub2/sub2 * (glob)
1176 +++ */cloned/sub1/sub2/sub2 * (glob)
1172 @@ -1* +1* @@ (glob)
1177 @@ -1* +1* @@ (glob)
1173 -modified
1178 -modified
1174 +mod
1179 +mod
1175 [1]
1180 [1]
1176
1181
1177 $ cd ..
1182 $ cd ..
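A minimal sketch of how a transcript like the one above is normally exercised, assuming it is saved as a `.t` file under Mercurial's tests/ directory; the file name used here is hypothetical, not taken from this changeset:

  $ cd tests
  $ python run-tests.py test-subrepo-recursion.t      # run this single test (hypothetical file name)
  $ python run-tests.py -i test-subrepo-recursion.t   # interactively review and accept changed output

run-tests.py replays each "$" command and compares its actual output against the expected lines in the transcript, reporting a diff on any mismatch.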