narrow: use merge.ACTION_GET instead of duplicating 'g' constant...
Martin von Zweigbergk
r41213:5838afea default
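The change itself is a single line in _writeaddedfiles(): the merge action table is now indexed with the named constant merge.ACTION_GET instead of the raw 'g' key it duplicated. Below is a minimal illustrative sketch of that pattern; the file path and the appended tuple are invented for the example, not taken from the changeset.

    # Sketch only: build a merge action table keyed by the named constant,
    # mirroring the pattern _writeaddedfiles() uses in the diff below.
    from mercurial import merge

    actions = merge.emptyactions()                 # one empty list per action type
    addgaction = actions[merge.ACTION_GET].append  # previously actions['g'].append
    # queue a "get this file from the other side" action (illustrative arguments)
    addgaction(('some/file', ('', False), 'narrowspec updated'))

Using the named constant keeps the action key defined in one place, so the letter cannot silently diverge between merge.py and callers such as narrowspec.py.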
@@ -1,296 +1,296 @@
 # narrowspec.py - methods for working with a narrow view of a repository
 #
 # Copyright 2017 Google, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 
 from .i18n import _
 from . import (
     error,
     match as matchmod,
     merge,
     repository,
     sparse,
     util,
 )
 
 # The file in .hg/store/ that indicates which paths exist in the store
 FILENAME = 'narrowspec'
 # The file in .hg/ that indicates which paths exist in the dirstate
 DIRSTATE_FILENAME = 'narrowspec.dirstate'
 
 # Pattern prefixes that are allowed in narrow patterns. This list MUST
 # only contain patterns that are fast and safe to evaluate. Keep in mind
 # that patterns are supplied by clients and executed on remote servers
 # as part of wire protocol commands. That means that changes to this
 # data structure influence the wire protocol and should not be taken
 # lightly - especially removals.
 VALID_PREFIXES = (
     b'path:',
     b'rootfilesin:',
 )
 
 def normalizesplitpattern(kind, pat):
     """Returns the normalized version of a pattern and kind.
 
     Returns a tuple with the normalized kind and normalized pattern.
     """
     pat = pat.rstrip('/')
     _validatepattern(pat)
     return kind, pat
 
 def _numlines(s):
     """Returns the number of lines in s, including ending empty lines."""
     # We use splitlines because it is Unicode-friendly and thus Python 3
     # compatible. However, it does not count empty lines at the end, so trick
     # it by adding a character at the end.
     return len((s + 'x').splitlines())
 
 def _validatepattern(pat):
     """Validates the pattern and aborts if it is invalid.
 
     Patterns are stored in the narrowspec as newline-separated
     POSIX-style bytestring paths. There's no escaping.
     """
 
     # We use newlines as separators in the narrowspec file, so don't allow them
     # in patterns.
     if _numlines(pat) > 1:
         raise error.Abort(_('newlines are not allowed in narrowspec paths'))
 
     components = pat.split('/')
     if '.' in components or '..' in components:
         raise error.Abort(_('"." and ".." are not allowed in narrowspec paths'))
 
 def normalizepattern(pattern, defaultkind='path'):
     """Returns the normalized version of a text-format pattern.
 
     If the pattern has no kind, the default will be added.
     """
     kind, pat = matchmod._patsplit(pattern, defaultkind)
     return '%s:%s' % normalizesplitpattern(kind, pat)
 
 def parsepatterns(pats):
     """Parses an iterable of patterns into a typed pattern set.
 
     Patterns are assumed to be ``path:`` if no prefix is present.
     For safety and performance reasons, only some prefixes are allowed.
     See ``validatepatterns()``.
 
     This function should be used on patterns that come from the user to
     normalize and validate them to the internal data structure used for
     representing patterns.
     """
     res = {normalizepattern(orig) for orig in pats}
     validatepatterns(res)
     return res
 
 def validatepatterns(pats):
     """Validate that patterns are in the expected data structure and format.
 
     And that is a set of normalized patterns beginning with ``path:`` or
     ``rootfilesin:``.
 
     This function should be used to validate internal data structures
     and patterns that are loaded from sources that use the internal,
     prefixed pattern representation (but can't necessarily be fully trusted).
     """
     if not isinstance(pats, set):
         raise error.ProgrammingError('narrow patterns should be a set; '
                                      'got %r' % pats)
 
     for pat in pats:
         if not pat.startswith(VALID_PREFIXES):
             # Use a Mercurial exception because this can happen due to user
             # bugs (e.g. manually updating spec file).
             raise error.Abort(_('invalid prefix on narrow pattern: %s') % pat,
                               hint=_('narrow patterns must begin with one of '
                                      'the following: %s') %
                                    ', '.join(VALID_PREFIXES))
 
 def format(includes, excludes):
     output = '[include]\n'
     for i in sorted(includes - excludes):
         output += i + '\n'
     output += '[exclude]\n'
     for e in sorted(excludes):
         output += e + '\n'
     return output
 
 def match(root, include=None, exclude=None):
     if not include:
         # Passing empty include and empty exclude to matchmod.match()
         # gives a matcher that matches everything, so explicitly use
         # the nevermatcher.
         return matchmod.never(root, '')
     return matchmod.match(root, '', [], include=include or [],
                           exclude=exclude or [])
 
 def parseconfig(ui, spec):
     # maybe we should care about the profiles returned too
     includepats, excludepats, profiles = sparse.parseconfig(ui, spec, 'narrow')
     if profiles:
         raise error.Abort(_("including other spec files using '%include' is not"
                             " supported in narrowspec"))
 
     validatepatterns(includepats)
     validatepatterns(excludepats)
 
     return includepats, excludepats
 
 def load(repo):
     try:
         spec = repo.svfs.read(FILENAME)
     except IOError as e:
         # Treat "narrowspec does not exist" the same as "narrowspec file exists
         # and is empty".
         if e.errno == errno.ENOENT:
             return set(), set()
         raise
 
     return parseconfig(repo.ui, spec)
 
 def save(repo, includepats, excludepats):
     validatepatterns(includepats)
     validatepatterns(excludepats)
     spec = format(includepats, excludepats)
     repo.svfs.write(FILENAME, spec)
 
 def copytoworkingcopy(repo, tr):
     if tr:
         def write(file):
             spec = repo.svfs.read(FILENAME)
             file.write(spec)
             file.close()
         tr.addfilegenerator('narrowspec', (DIRSTATE_FILENAME,), write,
                             location='plain')
     else:
         spec = repo.svfs.read(FILENAME)
         repo.vfs.write(DIRSTATE_FILENAME, spec)
 
 def savebackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
     svfs = repo.svfs
     svfs.tryunlink(backupname)
     util.copyfile(svfs.join(FILENAME), svfs.join(backupname), hardlink=True)
 
 def restorebackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
     util.rename(repo.svfs.join(backupname), repo.svfs.join(FILENAME))
 
 def clearbackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
     repo.svfs.unlink(backupname)
 
 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
     r""" Restricts the patterns according to repo settings,
     results in a logical AND operation
 
     :param req_includes: requested includes
     :param req_excludes: requested excludes
     :param repo_includes: repo includes
     :param repo_excludes: repo excludes
     :return: include patterns, exclude patterns, and invalid include patterns.
 
     >>> restrictpatterns({'f1','f2'}, {}, ['f1'], [])
     (set(['f1']), {}, [])
     >>> restrictpatterns({'f1'}, {}, ['f1','f2'], [])
     (set(['f1']), {}, [])
     >>> restrictpatterns({'f1/fc1', 'f3/fc3'}, {}, ['f1','f2'], [])
     (set(['f1/fc1']), {}, [])
     >>> restrictpatterns({'f1_fc1'}, {}, ['f1','f2'], [])
     ([], set(['path:.']), [])
     >>> restrictpatterns({'f1/../f2/fc2'}, {}, ['f1','f2'], [])
     (set(['f2/fc2']), {}, [])
     >>> restrictpatterns({'f1/../f3/fc3'}, {}, ['f1','f2'], [])
     ([], set(['path:.']), [])
     >>> restrictpatterns({'f1/$non_exitent_var'}, {}, ['f1','f2'], [])
     (set(['f1/$non_exitent_var']), {}, [])
     """
     res_excludes = set(req_excludes)
     res_excludes.update(repo_excludes)
     invalid_includes = []
     if not req_includes:
         res_includes = set(repo_includes)
     elif 'path:.' not in repo_includes:
         res_includes = []
         for req_include in req_includes:
             req_include = util.expandpath(util.normpath(req_include))
             if req_include in repo_includes:
                 res_includes.append(req_include)
                 continue
             valid = False
             for repo_include in repo_includes:
                 if req_include.startswith(repo_include + '/'):
                     valid = True
                     res_includes.append(req_include)
                     break
             if not valid:
                 invalid_includes.append(req_include)
         if len(res_includes) == 0:
             res_excludes = {'path:.'}
         else:
             res_includes = set(res_includes)
     else:
         res_includes = set(req_includes)
     return res_includes, res_excludes, invalid_includes
 
 # These two are extracted for extensions (specifically for Google's CitC file
 # system)
 def _deletecleanfiles(repo, files):
     for f in files:
         repo.wvfs.unlinkpath(f)
 
 def _writeaddedfiles(repo, pctx, files):
     actions = merge.emptyactions()
-    addgaction = actions['g'].append
+    addgaction = actions[merge.ACTION_GET].append
     mf = repo['.'].manifest()
     for f in files:
         if not repo.wvfs.exists(f):
             addgaction((f, (mf.flags(f), False), "narrowspec updated"))
     merge.applyupdates(repo, actions, wctx=repo[None],
                        mctx=repo['.'], overwrite=False)
 
 def checkworkingcopynarrowspec(repo):
     storespec = repo.svfs.tryread(FILENAME)
     wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
     if wcspec != storespec:
         raise error.Abort(_("working copy's narrowspec is stale"),
                           hint=_("run 'hg tracked --update-working-copy'"))
 
 def updateworkingcopy(repo):
     oldspec = repo.vfs.tryread(DIRSTATE_FILENAME)
     newspec = repo.svfs.tryread(FILENAME)
 
     oldincludes, oldexcludes = parseconfig(repo.ui, oldspec)
     newincludes, newexcludes = parseconfig(repo.ui, newspec)
     oldmatch = match(repo.root, include=oldincludes, exclude=oldexcludes)
     newmatch = match(repo.root, include=newincludes, exclude=newexcludes)
     addedmatch = matchmod.differencematcher(newmatch, oldmatch)
     removedmatch = matchmod.differencematcher(oldmatch, newmatch)
 
     ds = repo.dirstate
     lookup, status = ds.status(removedmatch, subrepos=[], ignored=False,
                                clean=True, unknown=False)
     _deletecleanfiles(repo, status.clean)
     trackeddirty = lookup + status.modified + status.added
     for f in sorted(trackeddirty):
         repo.ui.status(_('not deleting possibly dirty file %s\n') % f)
     for f in status.clean + trackeddirty:
         ds.drop(f)
 
     repo.narrowpats = newincludes, newexcludes
     repo._narrowmatch = newmatch
     pctx = repo['.']
     newfiles = [f for f in pctx.manifest().walk(addedmatch) if f not in ds]
     for f in newfiles:
         ds.normallookup(f)
     _writeaddedfiles(repo, pctx, newfiles)
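For orientation on the surrounding module, the pattern helpers near the top of the file compose roughly as shown below. This sketch is not part of the changeset: the example patterns are invented, and the calls simply mirror the function signatures and behavior visible in the listing above (plain string literals in the file's own Python 2-era style).

    # Sketch only: normalize user-supplied patterns and render a narrowspec.
    from mercurial import narrowspec

    includes = narrowspec.parsepatterns(['foo/bar', 'path:baz'])
    # -> {'path:foo/bar', 'path:baz'}  (default 'path:' kind added, then validated)
    excludes = narrowspec.parsepatterns(['rootfilesin:foo'])

    spec = narrowspec.format(includes, excludes)
    # spec is the text that save() writes to .hg/store/narrowspec:
    #   [include]
    #   path:baz
    #   path:foo/bar
    #   [exclude]
    #   rootfilesin:foo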