##// END OF EJS Templates
narrow: also warn when not deleting untracked or ignored files...
Martin von Zweigbergk -
r42347:aa8f8392 default draft
parent child Browse files
Show More
@@ -1,318 +1,322 b''
1 # narrowspec.py - methods for working with a narrow view of a repository
1 # narrowspec.py - methods for working with a narrow view of a repository
2 #
2 #
3 # Copyright 2017 Google, Inc.
3 # Copyright 2017 Google, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11
11
12 from .i18n import _
12 from .i18n import _
13 from . import (
13 from . import (
14 error,
14 error,
15 match as matchmod,
15 match as matchmod,
16 merge,
16 merge,
17 repository,
17 repository,
18 scmutil,
18 scmutil,
19 sparse,
19 sparse,
20 util,
20 util,
21 )
21 )
22
22
# The file in .hg/store/ that indicates which paths exist in the store
24 FILENAME = 'narrowspec'
24 FILENAME = 'narrowspec'
# The file in .hg/ that indicates which paths exist in the dirstate
26 DIRSTATE_FILENAME = 'narrowspec.dirstate'
26 DIRSTATE_FILENAME = 'narrowspec.dirstate'
27
27
28 # Pattern prefixes that are allowed in narrow patterns. This list MUST
28 # Pattern prefixes that are allowed in narrow patterns. This list MUST
29 # only contain patterns that are fast and safe to evaluate. Keep in mind
29 # only contain patterns that are fast and safe to evaluate. Keep in mind
30 # that patterns are supplied by clients and executed on remote servers
30 # that patterns are supplied by clients and executed on remote servers
31 # as part of wire protocol commands. That means that changes to this
31 # as part of wire protocol commands. That means that changes to this
32 # data structure influence the wire protocol and should not be taken
32 # data structure influence the wire protocol and should not be taken
33 # lightly - especially removals.
33 # lightly - especially removals.
34 VALID_PREFIXES = (
34 VALID_PREFIXES = (
35 b'path:',
35 b'path:',
36 b'rootfilesin:',
36 b'rootfilesin:',
37 )
37 )
38
38
def normalizesplitpattern(kind, pat):
    """Normalizes an already-split pattern.

    Strips trailing slashes from the pattern, validates it, and returns
    the resulting (kind, pattern) tuple.
    """
    stripped = pat.rstrip('/')
    _validatepattern(stripped)
    return kind, stripped
47
47
48 def _numlines(s):
48 def _numlines(s):
49 """Returns the number of lines in s, including ending empty lines."""
49 """Returns the number of lines in s, including ending empty lines."""
50 # We use splitlines because it is Unicode-friendly and thus Python 3
50 # We use splitlines because it is Unicode-friendly and thus Python 3
51 # compatible. However, it does not count empty lines at the end, so trick
51 # compatible. However, it does not count empty lines at the end, so trick
52 # it by adding a character at the end.
52 # it by adding a character at the end.
53 return len((s + 'x').splitlines())
53 return len((s + 'x').splitlines())
54
54
def _validatepattern(pat):
    """Validates the pattern and aborts if it is invalid.

    Patterns are stored in the narrowspec as newline-separated
    POSIX-style bytestring paths. There's no escaping.
    """
    # Newlines separate entries in the narrowspec file, so a pattern
    # containing one would corrupt the file.
    if _numlines(pat) != 1:
        raise error.Abort(_('newlines are not allowed in narrowspec paths'))

    # Relative path components would escape or alias paths; reject them.
    if {'.', '..'} & set(pat.split('/')):
        raise error.Abort(_('"." and ".." are not allowed in narrowspec paths'))
70
70
def normalizepattern(pattern, defaultkind='path'):
    """Returns the normalized version of a text-format pattern.

    If the pattern has no kind, the default will be added.
    """
    split = matchmod._patsplit(pattern, defaultkind)
    return '%s:%s' % normalizesplitpattern(*split)
78
78
def parsepatterns(pats):
    """Parses an iterable of patterns into a typed pattern set.

    Patterns are assumed to be ``path:`` if no prefix is present.
    For safety and performance reasons, only some prefixes are allowed.
    See ``validatepatterns()``.

    This function should be used on patterns that come from the user to
    normalize and validate them to the internal data structure used for
    representing patterns.
    """
    normalized = set()
    for orig in pats:
        normalized.add(normalizepattern(orig))
    validatepatterns(normalized)
    return normalized
93
93
def validatepatterns(pats):
    """Validate that patterns are in the expected data structure and format.

    And that is a set of normalized patterns beginning with ``path:`` or
    ``rootfilesin:``.

    This function should be used to validate internal data structures
    and patterns that are loaded from sources that use the internal,
    prefixed pattern representation (but can't necessarily be fully trusted).
    """
    if not isinstance(pats, set):
        raise error.ProgrammingError('narrow patterns should be a set; '
                                     'got %r' % pats)

    for pat in pats:
        if pat.startswith(VALID_PREFIXES):
            continue
        # Use a Mercurial exception because this can happen due to user
        # bugs (e.g. manually updating spec file).
        raise error.Abort(_('invalid prefix on narrow pattern: %s') % pat,
                          hint=_('narrow patterns must begin with one of '
                                 'the following: %s') %
                               ', '.join(VALID_PREFIXES))
116
116
def format(includes, excludes):
    """Serializes include and exclude pattern sets to narrowspec text.

    Includes that are also excluded are dropped from the include
    section, since the exclusion wins anyway.
    """
    lines = ['[include]']
    lines.extend(sorted(includes - excludes))
    lines.append('[exclude]')
    lines.extend(sorted(excludes))
    return '\n'.join(lines) + '\n'
125
125
def match(root, include=None, exclude=None):
    """Builds a matcher for the given narrow include/exclude patterns."""
    if include:
        return matchmod.match(root, '', [], include=include or [],
                              exclude=exclude or [])
    # Passing empty include and empty exclude to matchmod.match()
    # gives a matcher that matches everything, so explicitly use
    # the nevermatcher.
    return matchmod.never()
134
134
def parseconfig(ui, spec):
    """Parses narrowspec text into validated (includes, excludes) sets."""
    # maybe we should care about the profiles returned too
    includepats, excludepats, profiles = sparse.parseconfig(ui, spec, 'narrow')
    if profiles:
        raise error.Abort(_("including other spec files using '%include' is not"
                            " supported in narrowspec"))

    for pats in (includepats, excludepats):
        validatepatterns(pats)

    return includepats, excludepats
146
146
def load(repo):
    """Reads and parses the narrowspec from the store.

    A missing narrowspec file is treated the same as an existing but
    empty one.
    """
    try:
        spec = repo.svfs.read(FILENAME)
    except IOError as e:
        if e.errno == errno.ENOENT:
            return set(), set()
        raise

    return parseconfig(repo.ui, spec)
158
158
def save(repo, includepats, excludepats):
    """Validates the given patterns and writes them to the store narrowspec."""
    for pats in (includepats, excludepats):
        validatepatterns(pats)
    repo.svfs.write(FILENAME, format(includepats, excludepats))
164
164
def copytoworkingcopy(repo):
    """Copies the store narrowspec into the working-copy (.hg) narrowspec."""
    repo.vfs.write(DIRSTATE_FILENAME, repo.svfs.read(FILENAME))
168
168
def savebackup(repo, backupname):
    """Hardlink-copies the store narrowspec to backupname.

    No-op for repos without the narrow requirement.
    """
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    svfs = repo.svfs
    svfs.tryunlink(backupname)
    util.copyfile(svfs.join(FILENAME), svfs.join(backupname), hardlink=True)
175
175
def restorebackup(repo, backupname):
    """Moves the store narrowspec backup back into place.

    No-op for repos without the narrow requirement.
    """
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    svfs = repo.svfs
    util.rename(svfs.join(backupname), svfs.join(FILENAME))
180
180
def savewcbackup(repo, backupname):
    """Hardlink-copies the working-copy narrowspec to backupname.

    No-op for repos without the narrow requirement.
    """
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    vfs = repo.vfs
    vfs.tryunlink(backupname)
    # The working-copy narrowspec may not exist in old repos
    if vfs.exists(DIRSTATE_FILENAME):
        util.copyfile(vfs.join(DIRSTATE_FILENAME), vfs.join(backupname),
                      hardlink=True)
190
190
def restorewcbackup(repo, backupname):
    """Moves the working-copy narrowspec backup back into place.

    No-op for repos without the narrow requirement.
    """
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    vfs = repo.vfs
    # The backup may not exist in old repos
    if vfs.exists(backupname):
        util.rename(vfs.join(backupname), vfs.join(DIRSTATE_FILENAME))
197
197
def clearwcbackup(repo, backupname):
    """Deletes the working-copy narrowspec backup, if present.

    No-op for repos without the narrow requirement.
    """
    if repository.NARROW_REQUIREMENT in repo.requirements:
        repo.vfs.tryunlink(backupname)
202
202
def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
    r""" Restricts the patterns according to repo settings,
    results in a logical AND operation

    :param req_includes: requested includes
    :param req_excludes: requested excludes
    :param repo_includes: repo includes
    :param repo_excludes: repo excludes
    :return: include patterns, exclude patterns, and invalid include patterns.

    >>> restrictpatterns({'f1','f2'}, {}, ['f1'], [])
    (set(['f1']), {}, [])
    >>> restrictpatterns({'f1'}, {}, ['f1','f2'], [])
    (set(['f1']), {}, [])
    >>> restrictpatterns({'f1/fc1', 'f3/fc3'}, {}, ['f1','f2'], [])
    (set(['f1/fc1']), {}, [])
    >>> restrictpatterns({'f1_fc1'}, {}, ['f1','f2'], [])
    ([], set(['path:.']), [])
    >>> restrictpatterns({'f1/../f2/fc2'}, {}, ['f1','f2'], [])
    (set(['f2/fc2']), {}, [])
    >>> restrictpatterns({'f1/../f3/fc3'}, {}, ['f1','f2'], [])
    ([], set(['path:.']), [])
    >>> restrictpatterns({'f1/$non_existent_var'}, {}, ['f1','f2'], [])
    (set(['f1/$non_existent_var']), {}, [])
    """
    # Excludes are additive: anything excluded by either side stays excluded.
    res_excludes = set(req_excludes)
    res_excludes.update(repo_excludes)
    invalid_includes = []
    if not req_includes:
        # Nothing specific requested; keep the repo's includes as-is.
        res_includes = set(repo_includes)
    elif 'path:.' not in repo_includes:
        # The repo covers only part of the tree: each requested include
        # must equal, or fall inside, one of the repo's includes.
        res_includes = []
        for req_include in req_includes:
            # Normalize so 'f1/../f2' matches against 'f2' (see doctests).
            req_include = util.expandpath(util.normpath(req_include))
            if req_include in repo_includes:
                res_includes.append(req_include)
                continue
            valid = False
            for repo_include in repo_includes:
                if req_include.startswith(repo_include + '/'):
                    valid = True
                    res_includes.append(req_include)
                    break
            if not valid:
                invalid_includes.append(req_include)
        if len(res_includes) == 0:
            # No requested include survived; the intersection is empty,
            # which we represent by excluding everything.
            res_excludes = {'path:.'}
        else:
            res_includes = set(res_includes)
    else:
        # The repo includes everything, so any requested narrowing is valid.
        res_includes = set(req_includes)
    return res_includes, res_excludes, invalid_includes
255
255
256 # These two are extracted for extensions (specifically for Google's CitC file
256 # These two are extracted for extensions (specifically for Google's CitC file
257 # system)
257 # system)
def _deletecleanfiles(repo, files):
    """Removes the given files from the working copy."""
    for f in files:
        repo.wvfs.unlinkpath(f)
261
261
def _writeaddedfiles(repo, pctx, files):
    """Writes out the given tracked files to the working copy.

    Files that already exist in the working copy are left alone.
    NOTE(review): pctx is unused in this body; presumably kept for the
    extension-override interface mentioned above — confirm before removing.
    """
    actions = merge.emptyactions()
    # ACTION_GET fetches file content from mctx (here, the '.' revision).
    addgaction = actions[merge.ACTION_GET].append
    mf = repo['.'].manifest()
    for f in files:
        if not repo.wvfs.exists(f):
            addgaction((f, (mf.flags(f), False), "narrowspec updated"))
    merge.applyupdates(repo, actions, wctx=repo[None],
                       mctx=repo['.'], overwrite=False)
271
271
def checkworkingcopynarrowspec(repo):
    """Aborts if the working copy's narrowspec is out of sync with the store's."""
    storespec = repo.svfs.tryread(FILENAME)
    wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
    if storespec != wcspec:
        raise error.Abort(_("working copy's narrowspec is stale"),
                          hint=_("run 'hg tracked --update-working-copy'"))
278
278
def updateworkingcopy(repo, assumeclean=False):
    """updates the working copy and dirstate from the store narrowspec

    When assumeclean=True, files that are not known to be clean will also
    be deleted. It is then up to the caller to make sure they are clean.
    """
    oldspec = repo.vfs.tryread(DIRSTATE_FILENAME)
    newspec = repo.svfs.tryread(FILENAME)

    oldincludes, oldexcludes = parseconfig(repo.ui, oldspec)
    newincludes, newexcludes = parseconfig(repo.ui, newspec)
    oldmatch = match(repo.root, include=oldincludes, exclude=oldexcludes)
    newmatch = match(repo.root, include=newincludes, exclude=newexcludes)
    # Files newly covered by the narrowspec, and files no longer covered.
    addedmatch = matchmod.differencematcher(newmatch, oldmatch)
    removedmatch = matchmod.differencematcher(oldmatch, newmatch)

    ds = repo.dirstate
    # Also request unknown and ignored files so we can warn (below) that
    # they are not being deleted.
    lookup, status = ds.status(removedmatch, subrepos=[], ignored=True,
                               clean=True, unknown=True)
    trackeddirty = status.modified + status.added
    clean = status.clean
    if assumeclean:
        assert not trackeddirty
        clean.extend(lookup)
    else:
        # "lookup" files may or may not be clean; err on the side of
        # keeping them.
        trackeddirty.extend(lookup)
    _deletecleanfiles(repo, clean)
    uipathfn = scmutil.getuipathfn(repo)
    for f in sorted(trackeddirty):
        repo.ui.status(_('not deleting possibly dirty file %s\n') % uipathfn(f))
    for f in sorted(status.unknown):
        repo.ui.status(_('not deleting unknown file %s\n') % uipathfn(f))
    for f in sorted(status.ignored):
        repo.ui.status(_('not deleting ignored file %s\n') % uipathfn(f))
    # Drop every no-longer-covered tracked file from the dirstate, whether
    # or not it was deleted from disk above.
    for f in clean + trackeddirty:
        ds.drop(f)

    repo.narrowpats = newincludes, newexcludes
    repo._narrowmatch = newmatch
    pctx = repo['.']
    # Files in the new narrowspec that the dirstate doesn't know about yet.
    newfiles = [f for f in pctx.manifest().walk(addedmatch) if f not in ds]
    for f in newfiles:
        ds.normallookup(f)
    _writeaddedfiles(repo, pctx, newfiles)
@@ -1,178 +1,191 b''
1 #testcases flat tree
1 #testcases flat tree
2
2
3 $ . "$TESTDIR/narrow-library.sh"
3 $ . "$TESTDIR/narrow-library.sh"
4
4
5 #if tree
5 #if tree
6 $ cat << EOF >> $HGRCPATH
6 $ cat << EOF >> $HGRCPATH
7 > [experimental]
7 > [experimental]
8 > treemanifest = 1
8 > treemanifest = 1
9 > EOF
9 > EOF
10 #endif
10 #endif
11
11
12 $ cat << EOF >> $HGRCPATH
12 $ cat << EOF >> $HGRCPATH
13 > [extensions]
13 > [extensions]
14 > share =
14 > share =
15 > EOF
15 > EOF
16
16
17 $ hg init remote
17 $ hg init remote
18 $ cd remote
18 $ cd remote
19 $ for x in `$TESTDIR/seq.py 0 10`
19 $ for x in `$TESTDIR/seq.py 0 10`
20 > do
20 > do
21 > mkdir d$x
21 > mkdir d$x
22 > echo $x > d$x/f
22 > echo $x > d$x/f
23 > hg add d$x/f
23 > hg add d$x/f
24 > hg commit -m "add d$x/f"
24 > hg commit -m "add d$x/f"
25 > done
25 > done
26 $ cd ..
26 $ cd ..
27
27
28 $ hg clone --narrow ssh://user@dummy/remote main -q \
28 $ hg clone --narrow ssh://user@dummy/remote main -q \
29 > --include d1 --include d3 --include d5 --include d7
29 > --include d1 --include d3 --include d5 --include d7
30
30
31 Ignore file called "ignored"
32 $ echo ignored > main/.hgignore
33
31 $ hg share main share
34 $ hg share main share
32 updating working directory
35 updating working directory
33 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
36 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
34 $ hg -R share tracked
37 $ hg -R share tracked
35 I path:d1
38 I path:d1
36 I path:d3
39 I path:d3
37 I path:d5
40 I path:d5
38 I path:d7
41 I path:d7
39 $ hg -R share files
42 $ hg -R share files
40 share/d1/f
43 share/d1/f
41 share/d3/f
44 share/d3/f
42 share/d5/f
45 share/d5/f
43 share/d7/f
46 share/d7/f
44
47
45 Narrow the share and check that the main repo's working copy gets updated
48 Narrow the share and check that the main repo's working copy gets updated
46
49
47 # Make sure the files that are supposed to be known-clean get their timestamps set in the dirstate
50 # Make sure the files that are supposed to be known-clean get their timestamps set in the dirstate
48 $ sleep 2
51 $ sleep 2
49 $ hg -R main st
52 $ hg -R main st
50 $ hg -R main debugdirstate --no-dates
53 $ hg -R main debugdirstate --no-dates
51 n 644 2 set d1/f
54 n 644 2 set d1/f
52 n 644 2 set d3/f
55 n 644 2 set d3/f
53 n 644 2 set d5/f
56 n 644 2 set d5/f
54 n 644 2 set d7/f
57 n 644 2 set d7/f
55 # Make d3/f dirty
58 # Make d3/f dirty
56 $ echo x >> main/d3/f
59 $ echo x >> main/d3/f
57 $ echo y >> main/d3/g
60 $ echo y >> main/d3/g
61 $ touch main/d3/ignored
62 $ touch main/d3/untracked
58 $ hg add main/d3/g
63 $ hg add main/d3/g
59 $ hg -R main st
64 $ hg -R main st
60 M d3/f
65 M d3/f
61 A d3/g
66 A d3/g
67 ? d3/untracked
62 # Make d5/f not match the dirstate timestamp even though it's clean
68 # Make d5/f not match the dirstate timestamp even though it's clean
63 $ sleep 2
69 $ sleep 2
64 $ hg -R main st
70 $ hg -R main st
65 M d3/f
71 M d3/f
66 A d3/g
72 A d3/g
73 ? d3/untracked
67 $ hg -R main debugdirstate --no-dates
74 $ hg -R main debugdirstate --no-dates
68 n 644 2 set d1/f
75 n 644 2 set d1/f
69 n 644 2 set d3/f
76 n 644 2 set d3/f
70 a 0 -1 unset d3/g
77 a 0 -1 unset d3/g
71 n 644 2 set d5/f
78 n 644 2 set d5/f
72 n 644 2 set d7/f
79 n 644 2 set d7/f
73 $ touch main/d5/f
80 $ touch main/d5/f
74 $ hg -R share tracked --removeinclude d1 --removeinclude d3 --removeinclude d5
81 $ hg -R share tracked --removeinclude d1 --removeinclude d3 --removeinclude d5
75 comparing with ssh://user@dummy/remote
82 comparing with ssh://user@dummy/remote
76 searching for changes
83 searching for changes
77 looking for local changes to affected paths
84 looking for local changes to affected paths
78 deleting data/d1/f.i
85 deleting data/d1/f.i
79 deleting data/d3/f.i
86 deleting data/d3/f.i
80 deleting data/d5/f.i
87 deleting data/d5/f.i
81 deleting meta/d1/00manifest.i (tree !)
88 deleting meta/d1/00manifest.i (tree !)
82 deleting meta/d3/00manifest.i (tree !)
89 deleting meta/d3/00manifest.i (tree !)
83 deleting meta/d5/00manifest.i (tree !)
90 deleting meta/d5/00manifest.i (tree !)
84 $ hg -R main tracked
91 $ hg -R main tracked
85 I path:d7
92 I path:d7
86 $ hg -R main files
93 $ hg -R main files
87 abort: working copy's narrowspec is stale
94 abort: working copy's narrowspec is stale
88 (run 'hg tracked --update-working-copy')
95 (run 'hg tracked --update-working-copy')
89 [255]
96 [255]
90 $ hg -R main tracked --update-working-copy
97 $ hg -R main tracked --update-working-copy
91 not deleting possibly dirty file d3/f
98 not deleting possibly dirty file d3/f
92 not deleting possibly dirty file d3/g
99 not deleting possibly dirty file d3/g
93 not deleting possibly dirty file d5/f
100 not deleting possibly dirty file d5/f
101 not deleting unknown file d3/untracked
102 not deleting ignored file d3/ignored
94 # d1/f, d3/f, d3/g and d5/f should no longer be reported
103 # d1/f, d3/f, d3/g and d5/f should no longer be reported
95 $ hg -R main files
104 $ hg -R main files
96 main/d7/f
105 main/d7/f
97 # d1/f should no longer be there, d3/f should be since it was dirty, d3/g should be there since
106 # d1/f should no longer be there, d3/f should be since it was dirty, d3/g should be there since
98 # it was added, and d5/f should be since we couldn't be sure it was clean
107 # it was added, and d5/f should be since we couldn't be sure it was clean
99 $ find main/d* -type f | sort
108 $ find main/d* -type f | sort
100 main/d3/f
109 main/d3/f
101 main/d3/g
110 main/d3/g
111 main/d3/ignored
112 main/d3/untracked
102 main/d5/f
113 main/d5/f
103 main/d7/f
114 main/d7/f
104
115
105 Widen the share and check that the main repo's working copy gets updated
116 Widen the share and check that the main repo's working copy gets updated
106
117
107 $ hg -R share tracked --addinclude d1 --addinclude d3 -q
118 $ hg -R share tracked --addinclude d1 --addinclude d3 -q
108 $ hg -R share tracked
119 $ hg -R share tracked
109 I path:d1
120 I path:d1
110 I path:d3
121 I path:d3
111 I path:d7
122 I path:d7
112 $ hg -R share files
123 $ hg -R share files
113 share/d1/f
124 share/d1/f
114 share/d3/f
125 share/d3/f
115 share/d7/f
126 share/d7/f
116 $ hg -R main tracked
127 $ hg -R main tracked
117 I path:d1
128 I path:d1
118 I path:d3
129 I path:d3
119 I path:d7
130 I path:d7
120 $ hg -R main files
131 $ hg -R main files
121 abort: working copy's narrowspec is stale
132 abort: working copy's narrowspec is stale
122 (run 'hg tracked --update-working-copy')
133 (run 'hg tracked --update-working-copy')
123 [255]
134 [255]
124 $ hg -R main tracked --update-working-copy
135 $ hg -R main tracked --update-working-copy
125 # d1/f, d3/f should be back
136 # d1/f, d3/f should be back
126 $ hg -R main files
137 $ hg -R main files
127 main/d1/f
138 main/d1/f
128 main/d3/f
139 main/d3/f
129 main/d7/f
140 main/d7/f
130 # d3/f should be modified (not clobbered by the widening), and d3/g should be untracked
141 # d3/f should be modified (not clobbered by the widening), and d3/g should be untracked
131 $ hg -R main st --all
142 $ hg -R main st --all
132 M d3/f
143 M d3/f
133 ? d3/g
144 ? d3/g
145 ? d3/untracked
146 I d3/ignored
134 C d1/f
147 C d1/f
135 C d7/f
148 C d7/f
136
149
137 We should also be able to unshare without breaking everything:
150 We should also be able to unshare without breaking everything:
138
151
139 $ hg share main share-unshare
152 $ hg share main share-unshare
140 updating working directory
153 updating working directory
141 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
154 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
142 $ cd share-unshare
155 $ cd share-unshare
143 $ hg unshare
156 $ hg unshare
144 $ hg verify
157 $ hg verify
145 checking changesets
158 checking changesets
146 checking manifests
159 checking manifests
147 checking directory manifests (tree !)
160 checking directory manifests (tree !)
148 crosschecking files in changesets and manifests
161 crosschecking files in changesets and manifests
149 checking files
162 checking files
150 checked 11 changesets with 3 changes to 3 files
163 checked 11 changesets with 3 changes to 3 files
151 $ cd ..
164 $ cd ..
152
165
153 Dirstate should be left alone when upgrading from version of hg that didn't support narrow+share
166 Dirstate should be left alone when upgrading from version of hg that didn't support narrow+share
154
167
155 $ hg share main share-upgrade
168 $ hg share main share-upgrade
156 updating working directory
169 updating working directory
157 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
158 $ cd share-upgrade
171 $ cd share-upgrade
159 $ echo x >> d1/f
172 $ echo x >> d1/f
160 $ echo y >> d3/g
173 $ echo y >> d3/g
161 $ hg add d3/g
174 $ hg add d3/g
162 $ hg rm d7/f
175 $ hg rm d7/f
163 $ hg st
176 $ hg st
164 M d1/f
177 M d1/f
165 A d3/g
178 A d3/g
166 R d7/f
179 R d7/f
167 Make it look like a repo from before narrow+share was supported
180 Make it look like a repo from before narrow+share was supported
168 $ rm .hg/narrowspec.dirstate
181 $ rm .hg/narrowspec.dirstate
169 $ hg ci -Am test
182 $ hg ci -Am test
170 abort: working copy's narrowspec is stale
183 abort: working copy's narrowspec is stale
171 (run 'hg tracked --update-working-copy')
184 (run 'hg tracked --update-working-copy')
172 [255]
185 [255]
173 $ hg tracked --update-working-copy
186 $ hg tracked --update-working-copy
174 $ hg st
187 $ hg st
175 M d1/f
188 M d1/f
176 A d3/g
189 A d3/g
177 R d7/f
190 R d7/f
178 $ cd ..
191 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now