narrow: fix crash when restoring backup in legacy repo...
Martin von Zweigbergk
r41334:88a7c211 stable
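
The fix addresses repositories created before narrow+share support: they have no .hg/narrowspec.dirstate, so savewcbackup() writes no backup file, and restorewcbackup()'s unconditional util.rename() then crashed on the missing backup. The change renames the backup only if it exists and switches clearwcbackup() from unlink() to tryunlink(), so a missing backup is tolerated. The first hunk below is narrowspec.py; the second is the narrow+share test, which now runs "hg ci -Am test" (instead of just "hg st") in a share whose narrowspec.dirstate has been removed, so the save/restore of the working-copy backup is exercised.

A minimal standalone sketch of the guarded backup/restore pattern, using plain os/shutil stand-ins; the helper names here are hypothetical and are not Mercurial's vfs/util API:

import os
import shutil

def save_wc_backup(hgdir, name, backupname):
    """Copy name to backupname, but only if name exists (old repos may lack it)."""
    backup = os.path.join(hgdir, backupname)
    if os.path.lexists(backup):
        os.unlink(backup)               # like vfs.tryunlink(): drop any stale backup
    src = os.path.join(hgdir, name)
    if os.path.exists(src):             # legacy repos: nothing to back up
        shutil.copyfile(src, backup)

def restore_wc_backup(hgdir, name, backupname):
    """Rename the backup over name, skipping quietly if no backup was written."""
    backup = os.path.join(hgdir, backupname)
    if os.path.exists(backup):          # the guard this commit adds
        os.replace(backup, os.path.join(hgdir, name))

def clear_wc_backup(hgdir, backupname):
    """Remove the backup if present; a missing backup is not an error."""
    try:
        os.unlink(os.path.join(hgdir, backupname))
    except FileNotFoundError:           # tryunlink-style tolerance
        pass

With these semantics, restoring or clearing a backup that was never created is a no-op, which is the legacy-repo situation the updated test reproduces.
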
@@ -1,314 +1,316 @@
# narrowspec.py - methods for working with a narrow view of a repository
#
# Copyright 2017 Google, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno

from .i18n import _
from . import (
    error,
    match as matchmod,
    merge,
    repository,
    sparse,
    util,
)

# The file in .hg/store/ that indicates which paths exit in the store
FILENAME = 'narrowspec'
# The file in .hg/ that indicates which paths exit in the dirstate
DIRSTATE_FILENAME = 'narrowspec.dirstate'

# Pattern prefixes that are allowed in narrow patterns. This list MUST
# only contain patterns that are fast and safe to evaluate. Keep in mind
# that patterns are supplied by clients and executed on remote servers
# as part of wire protocol commands. That means that changes to this
# data structure influence the wire protocol and should not be taken
# lightly - especially removals.
VALID_PREFIXES = (
    b'path:',
    b'rootfilesin:',
)

def normalizesplitpattern(kind, pat):
    """Returns the normalized version of a pattern and kind.

    Returns a tuple with the normalized kind and normalized pattern.
    """
    pat = pat.rstrip('/')
    _validatepattern(pat)
    return kind, pat

def _numlines(s):
    """Returns the number of lines in s, including ending empty lines."""
    # We use splitlines because it is Unicode-friendly and thus Python 3
    # compatible. However, it does not count empty lines at the end, so trick
    # it by adding a character at the end.
    return len((s + 'x').splitlines())

def _validatepattern(pat):
    """Validates the pattern and aborts if it is invalid.

    Patterns are stored in the narrowspec as newline-separated
    POSIX-style bytestring paths. There's no escaping.
    """

    # We use newlines as separators in the narrowspec file, so don't allow them
    # in patterns.
    if _numlines(pat) > 1:
        raise error.Abort(_('newlines are not allowed in narrowspec paths'))

    components = pat.split('/')
    if '.' in components or '..' in components:
        raise error.Abort(_('"." and ".." are not allowed in narrowspec paths'))

def normalizepattern(pattern, defaultkind='path'):
    """Returns the normalized version of a text-format pattern.

    If the pattern has no kind, the default will be added.
    """
    kind, pat = matchmod._patsplit(pattern, defaultkind)
    return '%s:%s' % normalizesplitpattern(kind, pat)

def parsepatterns(pats):
    """Parses an iterable of patterns into a typed pattern set.

    Patterns are assumed to be ``path:`` if no prefix is present.
    For safety and performance reasons, only some prefixes are allowed.
    See ``validatepatterns()``.

    This function should be used on patterns that come from the user to
    normalize and validate them to the internal data structure used for
    representing patterns.
    """
    res = {normalizepattern(orig) for orig in pats}
    validatepatterns(res)
    return res

def validatepatterns(pats):
    """Validate that patterns are in the expected data structure and format.

    And that is a set of normalized patterns beginning with ``path:`` or
    ``rootfilesin:``.

    This function should be used to validate internal data structures
    and patterns that are loaded from sources that use the internal,
    prefixed pattern representation (but can't necessarily be fully trusted).
    """
    if not isinstance(pats, set):
        raise error.ProgrammingError('narrow patterns should be a set; '
                                     'got %r' % pats)

    for pat in pats:
        if not pat.startswith(VALID_PREFIXES):
            # Use a Mercurial exception because this can happen due to user
            # bugs (e.g. manually updating spec file).
            raise error.Abort(_('invalid prefix on narrow pattern: %s') % pat,
                              hint=_('narrow patterns must begin with one of '
                                     'the following: %s') %
                                   ', '.join(VALID_PREFIXES))

def format(includes, excludes):
    output = '[include]\n'
    for i in sorted(includes - excludes):
        output += i + '\n'
    output += '[exclude]\n'
    for e in sorted(excludes):
        output += e + '\n'
    return output

def match(root, include=None, exclude=None):
    if not include:
        # Passing empty include and empty exclude to matchmod.match()
        # gives a matcher that matches everything, so explicitly use
        # the nevermatcher.
        return matchmod.never(root, '')
    return matchmod.match(root, '', [], include=include or [],
                          exclude=exclude or [])

def parseconfig(ui, spec):
    # maybe we should care about the profiles returned too
    includepats, excludepats, profiles = sparse.parseconfig(ui, spec, 'narrow')
    if profiles:
        raise error.Abort(_("including other spec files using '%include' is not"
                            " supported in narrowspec"))

    validatepatterns(includepats)
    validatepatterns(excludepats)

    return includepats, excludepats

def load(repo):
    try:
        spec = repo.svfs.read(FILENAME)
    except IOError as e:
        # Treat "narrowspec does not exist" the same as "narrowspec file exists
        # and is empty".
        if e.errno == errno.ENOENT:
            return set(), set()
        raise

    return parseconfig(repo.ui, spec)

def save(repo, includepats, excludepats):
    validatepatterns(includepats)
    validatepatterns(excludepats)
    spec = format(includepats, excludepats)
    repo.svfs.write(FILENAME, spec)

def copytoworkingcopy(repo):
    spec = repo.svfs.read(FILENAME)
    repo.vfs.write(DIRSTATE_FILENAME, spec)

def savebackup(repo, backupname):
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    svfs = repo.svfs
    svfs.tryunlink(backupname)
    util.copyfile(svfs.join(FILENAME), svfs.join(backupname), hardlink=True)

def restorebackup(repo, backupname):
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    util.rename(repo.svfs.join(backupname), repo.svfs.join(FILENAME))

def savewcbackup(repo, backupname):
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
    vfs = repo.vfs
    vfs.tryunlink(backupname)
    # It may not exist in old repos
    if vfs.exists(DIRSTATE_FILENAME):
        util.copyfile(vfs.join(DIRSTATE_FILENAME), vfs.join(backupname),
                      hardlink=True)

def restorewcbackup(repo, backupname):
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
-    util.rename(repo.vfs.join(backupname), repo.vfs.join(DIRSTATE_FILENAME))
+    # It may not exist in old repos
+    if repo.vfs.exists(backupname):
+        util.rename(repo.vfs.join(backupname), repo.vfs.join(DIRSTATE_FILENAME))

def clearwcbackup(repo, backupname):
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        return
-    repo.vfs.unlink(backupname)
+    repo.vfs.tryunlink(backupname)

def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
    r""" Restricts the patterns according to repo settings,
    results in a logical AND operation

    :param req_includes: requested includes
    :param req_excludes: requested excludes
    :param repo_includes: repo includes
    :param repo_excludes: repo excludes
    :return: include patterns, exclude patterns, and invalid include patterns.

    >>> restrictpatterns({'f1','f2'}, {}, ['f1'], [])
    (set(['f1']), {}, [])
    >>> restrictpatterns({'f1'}, {}, ['f1','f2'], [])
    (set(['f1']), {}, [])
    >>> restrictpatterns({'f1/fc1', 'f3/fc3'}, {}, ['f1','f2'], [])
    (set(['f1/fc1']), {}, [])
    >>> restrictpatterns({'f1_fc1'}, {}, ['f1','f2'], [])
    ([], set(['path:.']), [])
    >>> restrictpatterns({'f1/../f2/fc2'}, {}, ['f1','f2'], [])
    (set(['f2/fc2']), {}, [])
    >>> restrictpatterns({'f1/../f3/fc3'}, {}, ['f1','f2'], [])
    ([], set(['path:.']), [])
    >>> restrictpatterns({'f1/$non_exitent_var'}, {}, ['f1','f2'], [])
    (set(['f1/$non_exitent_var']), {}, [])
    """
    res_excludes = set(req_excludes)
    res_excludes.update(repo_excludes)
    invalid_includes = []
    if not req_includes:
        res_includes = set(repo_includes)
    elif 'path:.' not in repo_includes:
        res_includes = []
        for req_include in req_includes:
            req_include = util.expandpath(util.normpath(req_include))
            if req_include in repo_includes:
                res_includes.append(req_include)
                continue
            valid = False
            for repo_include in repo_includes:
                if req_include.startswith(repo_include + '/'):
                    valid = True
                    res_includes.append(req_include)
                    break
            if not valid:
                invalid_includes.append(req_include)
        if len(res_includes) == 0:
            res_excludes = {'path:.'}
        else:
            res_includes = set(res_includes)
    else:
        res_includes = set(req_includes)
    return res_includes, res_excludes, invalid_includes

# These two are extracted for extensions (specifically for Google's CitC file
# system)
def _deletecleanfiles(repo, files):
    for f in files:
        repo.wvfs.unlinkpath(f)

def _writeaddedfiles(repo, pctx, files):
    actions = merge.emptyactions()
    addgaction = actions[merge.ACTION_GET].append
    mf = repo['.'].manifest()
    for f in files:
        if not repo.wvfs.exists(f):
            addgaction((f, (mf.flags(f), False), "narrowspec updated"))
    merge.applyupdates(repo, actions, wctx=repo[None],
                       mctx=repo['.'], overwrite=False)

def checkworkingcopynarrowspec(repo):
    storespec = repo.svfs.tryread(FILENAME)
    wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
    if wcspec != storespec:
        raise error.Abort(_("working copy's narrowspec is stale"),
                          hint=_("run 'hg tracked --update-working-copy'"))

def updateworkingcopy(repo, assumeclean=False):
    """updates the working copy and dirstate from the store narrowspec

    When assumeclean=True, files that are not known to be clean will also
    be deleted. It is then up to the caller to make sure they are clean.
    """
    oldspec = repo.vfs.tryread(DIRSTATE_FILENAME)
    newspec = repo.svfs.tryread(FILENAME)

    oldincludes, oldexcludes = parseconfig(repo.ui, oldspec)
    newincludes, newexcludes = parseconfig(repo.ui, newspec)
    oldmatch = match(repo.root, include=oldincludes, exclude=oldexcludes)
    newmatch = match(repo.root, include=newincludes, exclude=newexcludes)
    addedmatch = matchmod.differencematcher(newmatch, oldmatch)
    removedmatch = matchmod.differencematcher(oldmatch, newmatch)

    ds = repo.dirstate
    lookup, status = ds.status(removedmatch, subrepos=[], ignored=False,
                               clean=True, unknown=False)
    trackeddirty = status.modified + status.added
    clean = status.clean
    if assumeclean:
        assert not trackeddirty
        clean.extend(lookup)
    else:
        trackeddirty.extend(lookup)
    _deletecleanfiles(repo, clean)
    for f in sorted(trackeddirty):
        repo.ui.status(_('not deleting possibly dirty file %s\n') % f)
    for f in clean + trackeddirty:
        ds.drop(f)

    repo.narrowpats = newincludes, newexcludes
    repo._narrowmatch = newmatch
    pctx = repo['.']
    newfiles = [f for f in pctx.manifest().walk(addedmatch) if f not in ds]
    for f in newfiles:
        ds.normallookup(f)
    _writeaddedfiles(repo, pctx, newfiles)
@@ -1,178 +1,178 @@
#testcases flat tree

  $ . "$TESTDIR/narrow-library.sh"

#if tree
  $ cat << EOF >> $HGRCPATH
  > [experimental]
  > treemanifest = 1
  > EOF
#endif

  $ cat << EOF >> $HGRCPATH
  > [extensions]
  > share =
  > EOF

  $ hg init remote
  $ cd remote
  $ for x in `$TESTDIR/seq.py 0 10`
  > do
  > mkdir d$x
  > echo $x > d$x/f
  > hg add d$x/f
  > hg commit -m "add d$x/f"
  > done
  $ cd ..

  $ hg clone --narrow ssh://user@dummy/remote main -q \
  > --include d1 --include d3 --include d5 --include d7

  $ hg share main share
  updating working directory
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg -R share tracked
  I path:d1
  I path:d3
  I path:d5
  I path:d7
  $ hg -R share files
  share/d1/f
  share/d3/f
  share/d5/f
  share/d7/f

Narrow the share and check that the main repo's working copy gets updated

# Make sure the files that are supposed to be known-clean get their timestamps set in the dirstate
  $ sleep 2
  $ hg -R main st
  $ hg -R main debugdirstate --no-dates
  n 644 2 set d1/f
  n 644 2 set d3/f
  n 644 2 set d5/f
  n 644 2 set d7/f
# Make d3/f dirty
  $ echo x >> main/d3/f
  $ echo y >> main/d3/g
  $ hg add main/d3/g
  $ hg -R main st
  M d3/f
  A d3/g
# Make d5/f not match the dirstate timestamp even though it's clean
  $ sleep 2
  $ hg -R main st
  M d3/f
  A d3/g
  $ hg -R main debugdirstate --no-dates
  n 644 2 set d1/f
  n 644 2 set d3/f
  a 0 -1 unset d3/g
  n 644 2 set d5/f
  n 644 2 set d7/f
  $ touch main/d5/f
  $ hg -R share tracked --removeinclude d1 --removeinclude d3 --removeinclude d5
  comparing with ssh://user@dummy/remote
  searching for changes
  looking for local changes to affected paths
  deleting data/d1/f.i
  deleting data/d3/f.i
  deleting data/d5/f.i
  deleting meta/d1/00manifest.i (tree !)
  deleting meta/d3/00manifest.i (tree !)
  deleting meta/d5/00manifest.i (tree !)
  $ hg -R main tracked
  I path:d7
  $ hg -R main files
  abort: working copy's narrowspec is stale
  (run 'hg tracked --update-working-copy')
  [255]
  $ hg -R main tracked --update-working-copy
  not deleting possibly dirty file d3/f
  not deleting possibly dirty file d3/g
  not deleting possibly dirty file d5/f
# d1/f, d3/f, d3/g and d5/f should no longer be reported
  $ hg -R main files
  main/d7/f
# d1/f should no longer be there, d3/f should be since it was dirty, d3/g should be there since
# it was added, and d5/f should be since we couldn't be sure it was clean
  $ find main/d* -type f | sort
  main/d3/f
  main/d3/g
  main/d5/f
  main/d7/f

Widen the share and check that the main repo's working copy gets updated

  $ hg -R share tracked --addinclude d1 --addinclude d3 -q
  $ hg -R share tracked
  I path:d1
  I path:d3
  I path:d7
  $ hg -R share files
  share/d1/f
  share/d3/f
  share/d7/f
  $ hg -R main tracked
  I path:d1
  I path:d3
  I path:d7
  $ hg -R main files
  abort: working copy's narrowspec is stale
  (run 'hg tracked --update-working-copy')
  [255]
  $ hg -R main tracked --update-working-copy
# d1/f, d3/f should be back
  $ hg -R main files
  main/d1/f
  main/d3/f
  main/d7/f
# d3/f should be modified (not clobbered by the widening), and d3/g should be untracked
  $ hg -R main st --all
  M d3/f
  ? d3/g
  C d1/f
  C d7/f

We should also be able to unshare without breaking everything:

  $ hg share main share-unshare
  updating working directory
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd share-unshare
  $ hg unshare
  $ hg verify
  checking changesets
  checking manifests
  checking directory manifests (tree !)
  crosschecking files in changesets and manifests
  checking files
  checked 11 changesets with 3 changes to 3 files
  $ cd ..

Dirstate should be left alone when upgrading from version of hg that didn't support narrow+share

  $ hg share main share-upgrade
  updating working directory
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd share-upgrade
  $ echo x >> d1/f
  $ echo y >> d3/g
  $ hg add d3/g
  $ hg rm d7/f
  $ hg st
  M d1/f
  A d3/g
  R d7/f
Make it look like a repo from before narrow+share was supported
  $ rm .hg/narrowspec.dirstate
-  $ hg st
+  $ hg ci -Am test
  abort: working copy's narrowspec is stale
  (run 'hg tracked --update-working-copy')
  [255]
  $ hg tracked --update-working-copy
  $ hg st
  M d1/f
  A d3/g
  R d7/f
  $ cd ..