store: invoke "osutil.listdir()" via vfs...
FUJIWARA Katsunori
r17747:aad3bce9 default
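The change below adds a readdir() wrapper to abstractvfs in scmutil.py (new lines 222-224) and drops the direct osutil import from store.py, so store code can list directories through a vfs that resolves relative names against its base instead of calling osutil.listdir() on hand-built paths. A minimal, hypothetical sketch of that pattern, with os.listdir() standing in for osutil.listdir() and sketchvfs standing in for the real vfs classes (neither is Mercurial's actual code):

    import os

    class sketchvfs(object):
        # toy stand-in for scmutil.abstractvfs/vfs, for illustration only
        def __init__(self, base):
            self.base = base

        def join(self, path):
            # mirror vfs.join(): relative names resolve against the base
            if path:
                return os.path.join(self.base, path)
            return self.base

        def readdir(self, path=None):
            # the real wrapper also forwards stat/skip to osutil.listdir()
            return os.listdir(self.join(path))

    # a store-like caller passes store-relative names and never builds
    # absolute paths itself, e.g.:
    #   vfs = sketchvfs('/repo/.hg/store')
    #   entries = vfs.readdir('data')
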
@@ -1,942 +1,945 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding, phases
9 import util, error, osutil, revset, similar, encoding, phases
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, repo, excluded=None):
13 def nochangesfound(ui, repo, excluded=None):
14 '''Report no changes for push/pull, excluded is None or a list of
14 '''Report no changes for push/pull, excluded is None or a list of
15 nodes excluded from the push/pull.
15 nodes excluded from the push/pull.
16 '''
16 '''
17 secretlist = []
17 secretlist = []
18 if excluded:
18 if excluded:
19 for n in excluded:
19 for n in excluded:
20 ctx = repo[n]
20 ctx = repo[n]
21 if ctx.phase() >= phases.secret and not ctx.extinct():
21 if ctx.phase() >= phases.secret and not ctx.extinct():
22 secretlist.append(n)
22 secretlist.append(n)
23
23
24 if secretlist:
24 if secretlist:
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
26 % len(secretlist))
26 % len(secretlist))
27 else:
27 else:
28 ui.status(_("no changes found\n"))
28 ui.status(_("no changes found\n"))
29
29
30 def checkfilename(f):
30 def checkfilename(f):
31 '''Check that the filename f is an acceptable filename for a tracked file'''
31 '''Check that the filename f is an acceptable filename for a tracked file'''
32 if '\r' in f or '\n' in f:
32 if '\r' in f or '\n' in f:
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
34
34
35 def checkportable(ui, f):
35 def checkportable(ui, f):
36 '''Check if filename f is portable and warn or abort depending on config'''
36 '''Check if filename f is portable and warn or abort depending on config'''
37 checkfilename(f)
37 checkfilename(f)
38 abort, warn = checkportabilityalert(ui)
38 abort, warn = checkportabilityalert(ui)
39 if abort or warn:
39 if abort or warn:
40 msg = util.checkwinfilename(f)
40 msg = util.checkwinfilename(f)
41 if msg:
41 if msg:
42 msg = "%s: %r" % (msg, f)
42 msg = "%s: %r" % (msg, f)
43 if abort:
43 if abort:
44 raise util.Abort(msg)
44 raise util.Abort(msg)
45 ui.warn(_("warning: %s\n") % msg)
45 ui.warn(_("warning: %s\n") % msg)
46
46
47 def checkportabilityalert(ui):
47 def checkportabilityalert(ui):
48 '''check if the user's config requests nothing, a warning, or abort for
48 '''check if the user's config requests nothing, a warning, or abort for
49 non-portable filenames'''
49 non-portable filenames'''
50 val = ui.config('ui', 'portablefilenames', 'warn')
50 val = ui.config('ui', 'portablefilenames', 'warn')
51 lval = val.lower()
51 lval = val.lower()
52 bval = util.parsebool(val)
52 bval = util.parsebool(val)
53 abort = os.name == 'nt' or lval == 'abort'
53 abort = os.name == 'nt' or lval == 'abort'
54 warn = bval or lval == 'warn'
54 warn = bval or lval == 'warn'
55 if bval is None and not (warn or abort or lval == 'ignore'):
55 if bval is None and not (warn or abort or lval == 'ignore'):
56 raise error.ConfigError(
56 raise error.ConfigError(
57 _("ui.portablefilenames value is invalid ('%s')") % val)
57 _("ui.portablefilenames value is invalid ('%s')") % val)
58 return abort, warn
58 return abort, warn
59
59
60 class casecollisionauditor(object):
60 class casecollisionauditor(object):
61 def __init__(self, ui, abort, dirstate):
61 def __init__(self, ui, abort, dirstate):
62 self._ui = ui
62 self._ui = ui
63 self._abort = abort
63 self._abort = abort
64 allfiles = '\0'.join(dirstate._map)
64 allfiles = '\0'.join(dirstate._map)
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
66 self._dirstate = dirstate
66 self._dirstate = dirstate
67 # The purpose of _newfiles is so that we don't complain about
67 # The purpose of _newfiles is so that we don't complain about
68 # case collisions if someone were to call this object with the
68 # case collisions if someone were to call this object with the
69 # same filename twice.
69 # same filename twice.
70 self._newfiles = set()
70 self._newfiles = set()
71
71
72 def __call__(self, f):
72 def __call__(self, f):
73 fl = encoding.lower(f)
73 fl = encoding.lower(f)
74 if (fl in self._loweredfiles and f not in self._dirstate and
74 if (fl in self._loweredfiles and f not in self._dirstate and
75 f not in self._newfiles):
75 f not in self._newfiles):
76 msg = _('possible case-folding collision for %s') % f
76 msg = _('possible case-folding collision for %s') % f
77 if self._abort:
77 if self._abort:
78 raise util.Abort(msg)
78 raise util.Abort(msg)
79 self._ui.warn(_("warning: %s\n") % msg)
79 self._ui.warn(_("warning: %s\n") % msg)
80 self._loweredfiles.add(fl)
80 self._loweredfiles.add(fl)
81 self._newfiles.add(f)
81 self._newfiles.add(f)
82
82
83 class pathauditor(object):
83 class pathauditor(object):
84 '''ensure that a filesystem path contains no banned components.
84 '''ensure that a filesystem path contains no banned components.
85 the following properties of a path are checked:
85 the following properties of a path are checked:
86
86
87 - ends with a directory separator
87 - ends with a directory separator
88 - under top-level .hg
88 - under top-level .hg
89 - starts at the root of a windows drive
89 - starts at the root of a windows drive
90 - contains ".."
90 - contains ".."
91 - traverses a symlink (e.g. a/symlink_here/b)
91 - traverses a symlink (e.g. a/symlink_here/b)
92 - inside a nested repository (a callback can be used to approve
92 - inside a nested repository (a callback can be used to approve
93 some nested repositories, e.g., subrepositories)
93 some nested repositories, e.g., subrepositories)
94 '''
94 '''
95
95
96 def __init__(self, root, callback=None):
96 def __init__(self, root, callback=None):
97 self.audited = set()
97 self.audited = set()
98 self.auditeddir = set()
98 self.auditeddir = set()
99 self.root = root
99 self.root = root
100 self.callback = callback
100 self.callback = callback
101 if os.path.lexists(root) and not util.checkcase(root):
101 if os.path.lexists(root) and not util.checkcase(root):
102 self.normcase = util.normcase
102 self.normcase = util.normcase
103 else:
103 else:
104 self.normcase = lambda x: x
104 self.normcase = lambda x: x
105
105
106 def __call__(self, path):
106 def __call__(self, path):
107 '''Check the relative path.
107 '''Check the relative path.
108 path may contain a pattern (e.g. foodir/**.txt)'''
108 path may contain a pattern (e.g. foodir/**.txt)'''
109
109
110 path = util.localpath(path)
110 path = util.localpath(path)
111 normpath = self.normcase(path)
111 normpath = self.normcase(path)
112 if normpath in self.audited:
112 if normpath in self.audited:
113 return
113 return
114 # AIX ignores "/" at end of path, others raise EISDIR.
114 # AIX ignores "/" at end of path, others raise EISDIR.
115 if util.endswithsep(path):
115 if util.endswithsep(path):
116 raise util.Abort(_("path ends in directory separator: %s") % path)
116 raise util.Abort(_("path ends in directory separator: %s") % path)
117 parts = util.splitpath(path)
117 parts = util.splitpath(path)
118 if (os.path.splitdrive(path)[0]
118 if (os.path.splitdrive(path)[0]
119 or parts[0].lower() in ('.hg', '.hg.', '')
119 or parts[0].lower() in ('.hg', '.hg.', '')
120 or os.pardir in parts):
120 or os.pardir in parts):
121 raise util.Abort(_("path contains illegal component: %s") % path)
121 raise util.Abort(_("path contains illegal component: %s") % path)
122 if '.hg' in path.lower():
122 if '.hg' in path.lower():
123 lparts = [p.lower() for p in parts]
123 lparts = [p.lower() for p in parts]
124 for p in '.hg', '.hg.':
124 for p in '.hg', '.hg.':
125 if p in lparts[1:]:
125 if p in lparts[1:]:
126 pos = lparts.index(p)
126 pos = lparts.index(p)
127 base = os.path.join(*parts[:pos])
127 base = os.path.join(*parts[:pos])
128 raise util.Abort(_("path '%s' is inside nested repo %r")
128 raise util.Abort(_("path '%s' is inside nested repo %r")
129 % (path, base))
129 % (path, base))
130
130
131 normparts = util.splitpath(normpath)
131 normparts = util.splitpath(normpath)
132 assert len(parts) == len(normparts)
132 assert len(parts) == len(normparts)
133
133
134 parts.pop()
134 parts.pop()
135 normparts.pop()
135 normparts.pop()
136 prefixes = []
136 prefixes = []
137 while parts:
137 while parts:
138 prefix = os.sep.join(parts)
138 prefix = os.sep.join(parts)
139 normprefix = os.sep.join(normparts)
139 normprefix = os.sep.join(normparts)
140 if normprefix in self.auditeddir:
140 if normprefix in self.auditeddir:
141 break
141 break
142 curpath = os.path.join(self.root, prefix)
142 curpath = os.path.join(self.root, prefix)
143 try:
143 try:
144 st = os.lstat(curpath)
144 st = os.lstat(curpath)
145 except OSError, err:
145 except OSError, err:
146 # EINVAL can be raised as invalid path syntax under win32.
146 # EINVAL can be raised as invalid path syntax under win32.
147 # They must be ignored for patterns can be checked too.
147 # They must be ignored for patterns can be checked too.
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
149 raise
149 raise
150 else:
150 else:
151 if stat.S_ISLNK(st.st_mode):
151 if stat.S_ISLNK(st.st_mode):
152 raise util.Abort(
152 raise util.Abort(
153 _('path %r traverses symbolic link %r')
153 _('path %r traverses symbolic link %r')
154 % (path, prefix))
154 % (path, prefix))
155 elif (stat.S_ISDIR(st.st_mode) and
155 elif (stat.S_ISDIR(st.st_mode) and
156 os.path.isdir(os.path.join(curpath, '.hg'))):
156 os.path.isdir(os.path.join(curpath, '.hg'))):
157 if not self.callback or not self.callback(curpath):
157 if not self.callback or not self.callback(curpath):
158 raise util.Abort(_("path '%s' is inside nested "
158 raise util.Abort(_("path '%s' is inside nested "
159 "repo %r")
159 "repo %r")
160 % (path, prefix))
160 % (path, prefix))
161 prefixes.append(normprefix)
161 prefixes.append(normprefix)
162 parts.pop()
162 parts.pop()
163 normparts.pop()
163 normparts.pop()
164
164
165 self.audited.add(normpath)
165 self.audited.add(normpath)
166 # only add prefixes to the cache after checking everything: we don't
166 # only add prefixes to the cache after checking everything: we don't
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
168 self.auditeddir.update(prefixes)
168 self.auditeddir.update(prefixes)
169
169
170 class abstractvfs(object):
170 class abstractvfs(object):
171 """Abstract base class; cannot be instantiated"""
171 """Abstract base class; cannot be instantiated"""
172
172
173 def __init__(self, *args, **kwargs):
173 def __init__(self, *args, **kwargs):
174 '''Prevent instantiation; don't call this from subclasses.'''
174 '''Prevent instantiation; don't call this from subclasses.'''
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
176
176
177 def tryread(self, path):
177 def tryread(self, path):
178 '''gracefully return an empty string for missing files'''
178 '''gracefully return an empty string for missing files'''
179 try:
179 try:
180 return self.read(path)
180 return self.read(path)
181 except IOError, inst:
181 except IOError, inst:
182 if inst.errno != errno.ENOENT:
182 if inst.errno != errno.ENOENT:
183 raise
183 raise
184 return ""
184 return ""
185
185
186 def read(self, path):
186 def read(self, path):
187 fp = self(path, 'rb')
187 fp = self(path, 'rb')
188 try:
188 try:
189 return fp.read()
189 return fp.read()
190 finally:
190 finally:
191 fp.close()
191 fp.close()
192
192
193 def write(self, path, data):
193 def write(self, path, data):
194 fp = self(path, 'wb')
194 fp = self(path, 'wb')
195 try:
195 try:
196 return fp.write(data)
196 return fp.write(data)
197 finally:
197 finally:
198 fp.close()
198 fp.close()
199
199
200 def append(self, path, data):
200 def append(self, path, data):
201 fp = self(path, 'ab')
201 fp = self(path, 'ab')
202 try:
202 try:
203 return fp.write(data)
203 return fp.write(data)
204 finally:
204 finally:
205 fp.close()
205 fp.close()
206
206
207 def exists(self, path=None):
207 def exists(self, path=None):
208 return os.path.exists(self.join(path))
208 return os.path.exists(self.join(path))
209
209
210 def isdir(self, path=None):
210 def isdir(self, path=None):
211 return os.path.isdir(self.join(path))
211 return os.path.isdir(self.join(path))
212
212
213 def makedir(self, path=None, notindexed=True):
213 def makedir(self, path=None, notindexed=True):
214 return util.makedir(self.join(path), notindexed)
214 return util.makedir(self.join(path), notindexed)
215
215
216 def makedirs(self, path=None, mode=None):
216 def makedirs(self, path=None, mode=None):
217 return util.makedirs(self.join(path), mode)
217 return util.makedirs(self.join(path), mode)
218
218
219 def mkdir(self, path=None):
219 def mkdir(self, path=None):
220 return os.mkdir(self.join(path))
220 return os.mkdir(self.join(path))
221
221
222 def readdir(self, path=None, stat=None, skip=None):
223 return osutil.listdir(self.join(path), stat, skip)
224
222 def stat(self, path=None):
225 def stat(self, path=None):
223 return os.stat(self.join(path))
226 return os.stat(self.join(path))
224
227
225 class vfs(abstractvfs):
228 class vfs(abstractvfs):
226 '''Operate files relative to a base directory
229 '''Operate files relative to a base directory
227
230
228 This class is used to hide the details of COW semantics and
231 This class is used to hide the details of COW semantics and
229 remote file access from higher level code.
232 remote file access from higher level code.
230 '''
233 '''
231 def __init__(self, base, audit=True, expand=False):
234 def __init__(self, base, audit=True, expand=False):
232 if expand:
235 if expand:
233 base = os.path.realpath(util.expandpath(base))
236 base = os.path.realpath(util.expandpath(base))
234 self.base = base
237 self.base = base
235 self._setmustaudit(audit)
238 self._setmustaudit(audit)
236 self.createmode = None
239 self.createmode = None
237 self._trustnlink = None
240 self._trustnlink = None
238
241
239 def _getmustaudit(self):
242 def _getmustaudit(self):
240 return self._audit
243 return self._audit
241
244
242 def _setmustaudit(self, onoff):
245 def _setmustaudit(self, onoff):
243 self._audit = onoff
246 self._audit = onoff
244 if onoff:
247 if onoff:
245 self.auditor = pathauditor(self.base)
248 self.auditor = pathauditor(self.base)
246 else:
249 else:
247 self.auditor = util.always
250 self.auditor = util.always
248
251
249 mustaudit = property(_getmustaudit, _setmustaudit)
252 mustaudit = property(_getmustaudit, _setmustaudit)
250
253
251 @util.propertycache
254 @util.propertycache
252 def _cansymlink(self):
255 def _cansymlink(self):
253 return util.checklink(self.base)
256 return util.checklink(self.base)
254
257
255 def _fixfilemode(self, name):
258 def _fixfilemode(self, name):
256 if self.createmode is None:
259 if self.createmode is None:
257 return
260 return
258 os.chmod(name, self.createmode & 0666)
261 os.chmod(name, self.createmode & 0666)
259
262
260 def __call__(self, path, mode="r", text=False, atomictemp=False):
263 def __call__(self, path, mode="r", text=False, atomictemp=False):
261 if self._audit:
264 if self._audit:
262 r = util.checkosfilename(path)
265 r = util.checkosfilename(path)
263 if r:
266 if r:
264 raise util.Abort("%s: %r" % (r, path))
267 raise util.Abort("%s: %r" % (r, path))
265 self.auditor(path)
268 self.auditor(path)
266 f = self.join(path)
269 f = self.join(path)
267
270
268 if not text and "b" not in mode:
271 if not text and "b" not in mode:
269 mode += "b" # for that other OS
272 mode += "b" # for that other OS
270
273
271 nlink = -1
274 nlink = -1
272 dirname, basename = util.split(f)
275 dirname, basename = util.split(f)
273 # If basename is empty, then the path is malformed because it points
276 # If basename is empty, then the path is malformed because it points
274 # to a directory. Let the posixfile() call below raise IOError.
277 # to a directory. Let the posixfile() call below raise IOError.
275 if basename and mode not in ('r', 'rb'):
278 if basename and mode not in ('r', 'rb'):
276 if atomictemp:
279 if atomictemp:
277 if not os.path.isdir(dirname):
280 if not os.path.isdir(dirname):
278 util.makedirs(dirname, self.createmode)
281 util.makedirs(dirname, self.createmode)
279 return util.atomictempfile(f, mode, self.createmode)
282 return util.atomictempfile(f, mode, self.createmode)
280 try:
283 try:
281 if 'w' in mode:
284 if 'w' in mode:
282 util.unlink(f)
285 util.unlink(f)
283 nlink = 0
286 nlink = 0
284 else:
287 else:
285 # nlinks() may behave differently for files on Windows
288 # nlinks() may behave differently for files on Windows
286 # shares if the file is open.
289 # shares if the file is open.
287 fd = util.posixfile(f)
290 fd = util.posixfile(f)
288 nlink = util.nlinks(f)
291 nlink = util.nlinks(f)
289 if nlink < 1:
292 if nlink < 1:
290 nlink = 2 # force mktempcopy (issue1922)
293 nlink = 2 # force mktempcopy (issue1922)
291 fd.close()
294 fd.close()
292 except (OSError, IOError), e:
295 except (OSError, IOError), e:
293 if e.errno != errno.ENOENT:
296 if e.errno != errno.ENOENT:
294 raise
297 raise
295 nlink = 0
298 nlink = 0
296 if not os.path.isdir(dirname):
299 if not os.path.isdir(dirname):
297 util.makedirs(dirname, self.createmode)
300 util.makedirs(dirname, self.createmode)
298 if nlink > 0:
301 if nlink > 0:
299 if self._trustnlink is None:
302 if self._trustnlink is None:
300 self._trustnlink = nlink > 1 or util.checknlink(f)
303 self._trustnlink = nlink > 1 or util.checknlink(f)
301 if nlink > 1 or not self._trustnlink:
304 if nlink > 1 or not self._trustnlink:
302 util.rename(util.mktempcopy(f), f)
305 util.rename(util.mktempcopy(f), f)
303 fp = util.posixfile(f, mode)
306 fp = util.posixfile(f, mode)
304 if nlink == 0:
307 if nlink == 0:
305 self._fixfilemode(f)
308 self._fixfilemode(f)
306 return fp
309 return fp
307
310
308 def symlink(self, src, dst):
311 def symlink(self, src, dst):
309 self.auditor(dst)
312 self.auditor(dst)
310 linkname = self.join(dst)
313 linkname = self.join(dst)
311 try:
314 try:
312 os.unlink(linkname)
315 os.unlink(linkname)
313 except OSError:
316 except OSError:
314 pass
317 pass
315
318
316 dirname = os.path.dirname(linkname)
319 dirname = os.path.dirname(linkname)
317 if not os.path.exists(dirname):
320 if not os.path.exists(dirname):
318 util.makedirs(dirname, self.createmode)
321 util.makedirs(dirname, self.createmode)
319
322
320 if self._cansymlink:
323 if self._cansymlink:
321 try:
324 try:
322 os.symlink(src, linkname)
325 os.symlink(src, linkname)
323 except OSError, err:
326 except OSError, err:
324 raise OSError(err.errno, _('could not symlink to %r: %s') %
327 raise OSError(err.errno, _('could not symlink to %r: %s') %
325 (src, err.strerror), linkname)
328 (src, err.strerror), linkname)
326 else:
329 else:
327 f = self(dst, "w")
330 f = self(dst, "w")
328 f.write(src)
331 f.write(src)
329 f.close()
332 f.close()
330 self._fixfilemode(dst)
333 self._fixfilemode(dst)
331
334
332 def audit(self, path):
335 def audit(self, path):
333 self.auditor(path)
336 self.auditor(path)
334
337
335 def join(self, path):
338 def join(self, path):
336 if path:
339 if path:
337 return os.path.join(self.base, path)
340 return os.path.join(self.base, path)
338 else:
341 else:
339 return self.base
342 return self.base
340
343
341 opener = vfs
344 opener = vfs
342
345
343 class filtervfs(abstractvfs):
346 class filtervfs(abstractvfs):
344 '''Wrapper vfs for filtering filenames with a function.'''
347 '''Wrapper vfs for filtering filenames with a function.'''
345
348
346 def __init__(self, opener, filter):
349 def __init__(self, opener, filter):
347 self._filter = filter
350 self._filter = filter
348 self._orig = opener
351 self._orig = opener
349
352
350 def __call__(self, path, *args, **kwargs):
353 def __call__(self, path, *args, **kwargs):
351 return self._orig(self._filter(path), *args, **kwargs)
354 return self._orig(self._filter(path), *args, **kwargs)
352
355
353 def join(self, path):
356 def join(self, path):
354 if path:
357 if path:
355 return self._orig.join(self._filter(path))
358 return self._orig.join(self._filter(path))
356 else:
359 else:
357 return self._orig.join(path)
360 return self._orig.join(path)
358
361
359 filteropener = filtervfs
362 filteropener = filtervfs
360
363
361 def canonpath(root, cwd, myname, auditor=None):
364 def canonpath(root, cwd, myname, auditor=None):
362 '''return the canonical path of myname, given cwd and root'''
365 '''return the canonical path of myname, given cwd and root'''
363 if util.endswithsep(root):
366 if util.endswithsep(root):
364 rootsep = root
367 rootsep = root
365 else:
368 else:
366 rootsep = root + os.sep
369 rootsep = root + os.sep
367 name = myname
370 name = myname
368 if not os.path.isabs(name):
371 if not os.path.isabs(name):
369 name = os.path.join(root, cwd, name)
372 name = os.path.join(root, cwd, name)
370 name = os.path.normpath(name)
373 name = os.path.normpath(name)
371 if auditor is None:
374 if auditor is None:
372 auditor = pathauditor(root)
375 auditor = pathauditor(root)
373 if name != rootsep and name.startswith(rootsep):
376 if name != rootsep and name.startswith(rootsep):
374 name = name[len(rootsep):]
377 name = name[len(rootsep):]
375 auditor(name)
378 auditor(name)
376 return util.pconvert(name)
379 return util.pconvert(name)
377 elif name == root:
380 elif name == root:
378 return ''
381 return ''
379 else:
382 else:
380 # Determine whether `name' is in the hierarchy at or beneath `root',
383 # Determine whether `name' is in the hierarchy at or beneath `root',
381 # by iterating name=dirname(name) until that causes no change (can't
384 # by iterating name=dirname(name) until that causes no change (can't
382 # check name == '/', because that doesn't work on windows). The list
385 # check name == '/', because that doesn't work on windows). The list
383 # `rel' holds the reversed list of components making up the relative
386 # `rel' holds the reversed list of components making up the relative
384 # file name we want.
387 # file name we want.
385 rel = []
388 rel = []
386 while True:
389 while True:
387 try:
390 try:
388 s = util.samefile(name, root)
391 s = util.samefile(name, root)
389 except OSError:
392 except OSError:
390 s = False
393 s = False
391 if s:
394 if s:
392 if not rel:
395 if not rel:
393 # name was actually the same as root (maybe a symlink)
396 # name was actually the same as root (maybe a symlink)
394 return ''
397 return ''
395 rel.reverse()
398 rel.reverse()
396 name = os.path.join(*rel)
399 name = os.path.join(*rel)
397 auditor(name)
400 auditor(name)
398 return util.pconvert(name)
401 return util.pconvert(name)
399 dirname, basename = util.split(name)
402 dirname, basename = util.split(name)
400 rel.append(basename)
403 rel.append(basename)
401 if dirname == name:
404 if dirname == name:
402 break
405 break
403 name = dirname
406 name = dirname
404
407
405 raise util.Abort('%s not under root' % myname)
408 raise util.Abort('%s not under root' % myname)
406
409
407 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
410 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
408 '''yield every hg repository under path, always recursively.
411 '''yield every hg repository under path, always recursively.
409 The recurse flag will only control recursion into repo working dirs'''
412 The recurse flag will only control recursion into repo working dirs'''
410 def errhandler(err):
413 def errhandler(err):
411 if err.filename == path:
414 if err.filename == path:
412 raise err
415 raise err
413 samestat = getattr(os.path, 'samestat', None)
416 samestat = getattr(os.path, 'samestat', None)
414 if followsym and samestat is not None:
417 if followsym and samestat is not None:
415 def adddir(dirlst, dirname):
418 def adddir(dirlst, dirname):
416 match = False
419 match = False
417 dirstat = os.stat(dirname)
420 dirstat = os.stat(dirname)
418 for lstdirstat in dirlst:
421 for lstdirstat in dirlst:
419 if samestat(dirstat, lstdirstat):
422 if samestat(dirstat, lstdirstat):
420 match = True
423 match = True
421 break
424 break
422 if not match:
425 if not match:
423 dirlst.append(dirstat)
426 dirlst.append(dirstat)
424 return not match
427 return not match
425 else:
428 else:
426 followsym = False
429 followsym = False
427
430
428 if (seen_dirs is None) and followsym:
431 if (seen_dirs is None) and followsym:
429 seen_dirs = []
432 seen_dirs = []
430 adddir(seen_dirs, path)
433 adddir(seen_dirs, path)
431 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
434 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
432 dirs.sort()
435 dirs.sort()
433 if '.hg' in dirs:
436 if '.hg' in dirs:
434 yield root # found a repository
437 yield root # found a repository
435 qroot = os.path.join(root, '.hg', 'patches')
438 qroot = os.path.join(root, '.hg', 'patches')
436 if os.path.isdir(os.path.join(qroot, '.hg')):
439 if os.path.isdir(os.path.join(qroot, '.hg')):
437 yield qroot # we have a patch queue repo here
440 yield qroot # we have a patch queue repo here
438 if recurse:
441 if recurse:
439 # avoid recursing inside the .hg directory
442 # avoid recursing inside the .hg directory
440 dirs.remove('.hg')
443 dirs.remove('.hg')
441 else:
444 else:
442 dirs[:] = [] # don't descend further
445 dirs[:] = [] # don't descend further
443 elif followsym:
446 elif followsym:
444 newdirs = []
447 newdirs = []
445 for d in dirs:
448 for d in dirs:
446 fname = os.path.join(root, d)
449 fname = os.path.join(root, d)
447 if adddir(seen_dirs, fname):
450 if adddir(seen_dirs, fname):
448 if os.path.islink(fname):
451 if os.path.islink(fname):
449 for hgname in walkrepos(fname, True, seen_dirs):
452 for hgname in walkrepos(fname, True, seen_dirs):
450 yield hgname
453 yield hgname
451 else:
454 else:
452 newdirs.append(d)
455 newdirs.append(d)
453 dirs[:] = newdirs
456 dirs[:] = newdirs
454
457
455 def osrcpath():
458 def osrcpath():
456 '''return default os-specific hgrc search path'''
459 '''return default os-specific hgrc search path'''
457 path = systemrcpath()
460 path = systemrcpath()
458 path.extend(userrcpath())
461 path.extend(userrcpath())
459 path = [os.path.normpath(f) for f in path]
462 path = [os.path.normpath(f) for f in path]
460 return path
463 return path
461
464
462 _rcpath = None
465 _rcpath = None
463
466
464 def rcpath():
467 def rcpath():
465 '''return hgrc search path. if env var HGRCPATH is set, use it.
468 '''return hgrc search path. if env var HGRCPATH is set, use it.
466 for each item in path, if directory, use files ending in .rc,
469 for each item in path, if directory, use files ending in .rc,
467 else use item.
470 else use item.
468 make HGRCPATH empty to only look in .hg/hgrc of current repo.
471 make HGRCPATH empty to only look in .hg/hgrc of current repo.
469 if no HGRCPATH, use default os-specific path.'''
472 if no HGRCPATH, use default os-specific path.'''
470 global _rcpath
473 global _rcpath
471 if _rcpath is None:
474 if _rcpath is None:
472 if 'HGRCPATH' in os.environ:
475 if 'HGRCPATH' in os.environ:
473 _rcpath = []
476 _rcpath = []
474 for p in os.environ['HGRCPATH'].split(os.pathsep):
477 for p in os.environ['HGRCPATH'].split(os.pathsep):
475 if not p:
478 if not p:
476 continue
479 continue
477 p = util.expandpath(p)
480 p = util.expandpath(p)
478 if os.path.isdir(p):
481 if os.path.isdir(p):
479 for f, kind in osutil.listdir(p):
482 for f, kind in osutil.listdir(p):
480 if f.endswith('.rc'):
483 if f.endswith('.rc'):
481 _rcpath.append(os.path.join(p, f))
484 _rcpath.append(os.path.join(p, f))
482 else:
485 else:
483 _rcpath.append(p)
486 _rcpath.append(p)
484 else:
487 else:
485 _rcpath = osrcpath()
488 _rcpath = osrcpath()
486 return _rcpath
489 return _rcpath
487
490
488 if os.name != 'nt':
491 if os.name != 'nt':
489
492
490 def rcfiles(path):
493 def rcfiles(path):
491 rcs = [os.path.join(path, 'hgrc')]
494 rcs = [os.path.join(path, 'hgrc')]
492 rcdir = os.path.join(path, 'hgrc.d')
495 rcdir = os.path.join(path, 'hgrc.d')
493 try:
496 try:
494 rcs.extend([os.path.join(rcdir, f)
497 rcs.extend([os.path.join(rcdir, f)
495 for f, kind in osutil.listdir(rcdir)
498 for f, kind in osutil.listdir(rcdir)
496 if f.endswith(".rc")])
499 if f.endswith(".rc")])
497 except OSError:
500 except OSError:
498 pass
501 pass
499 return rcs
502 return rcs
500
503
501 def systemrcpath():
504 def systemrcpath():
502 path = []
505 path = []
503 if sys.platform == 'plan9':
506 if sys.platform == 'plan9':
504 root = 'lib/mercurial'
507 root = 'lib/mercurial'
505 else:
508 else:
506 root = 'etc/mercurial'
509 root = 'etc/mercurial'
507 # old mod_python does not set sys.argv
510 # old mod_python does not set sys.argv
508 if len(getattr(sys, 'argv', [])) > 0:
511 if len(getattr(sys, 'argv', [])) > 0:
509 p = os.path.dirname(os.path.dirname(sys.argv[0]))
512 p = os.path.dirname(os.path.dirname(sys.argv[0]))
510 path.extend(rcfiles(os.path.join(p, root)))
513 path.extend(rcfiles(os.path.join(p, root)))
511 path.extend(rcfiles('/' + root))
514 path.extend(rcfiles('/' + root))
512 return path
515 return path
513
516
514 def userrcpath():
517 def userrcpath():
515 if sys.platform == 'plan9':
518 if sys.platform == 'plan9':
516 return [os.environ['home'] + '/lib/hgrc']
519 return [os.environ['home'] + '/lib/hgrc']
517 else:
520 else:
518 return [os.path.expanduser('~/.hgrc')]
521 return [os.path.expanduser('~/.hgrc')]
519
522
520 else:
523 else:
521
524
522 import _winreg
525 import _winreg
523
526
524 def systemrcpath():
527 def systemrcpath():
525 '''return default os-specific hgrc search path'''
528 '''return default os-specific hgrc search path'''
526 rcpath = []
529 rcpath = []
527 filename = util.executablepath()
530 filename = util.executablepath()
528 # Use mercurial.ini found in directory with hg.exe
531 # Use mercurial.ini found in directory with hg.exe
529 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
532 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
530 if os.path.isfile(progrc):
533 if os.path.isfile(progrc):
531 rcpath.append(progrc)
534 rcpath.append(progrc)
532 return rcpath
535 return rcpath
533 # Use hgrc.d found in directory with hg.exe
536 # Use hgrc.d found in directory with hg.exe
534 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
537 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
535 if os.path.isdir(progrcd):
538 if os.path.isdir(progrcd):
536 for f, kind in osutil.listdir(progrcd):
539 for f, kind in osutil.listdir(progrcd):
537 if f.endswith('.rc'):
540 if f.endswith('.rc'):
538 rcpath.append(os.path.join(progrcd, f))
541 rcpath.append(os.path.join(progrcd, f))
539 return rcpath
542 return rcpath
540 # else look for a system rcpath in the registry
543 # else look for a system rcpath in the registry
541 value = util.lookupreg('SOFTWARE\\Mercurial', None,
544 value = util.lookupreg('SOFTWARE\\Mercurial', None,
542 _winreg.HKEY_LOCAL_MACHINE)
545 _winreg.HKEY_LOCAL_MACHINE)
543 if not isinstance(value, str) or not value:
546 if not isinstance(value, str) or not value:
544 return rcpath
547 return rcpath
545 value = util.localpath(value)
548 value = util.localpath(value)
546 for p in value.split(os.pathsep):
549 for p in value.split(os.pathsep):
547 if p.lower().endswith('mercurial.ini'):
550 if p.lower().endswith('mercurial.ini'):
548 rcpath.append(p)
551 rcpath.append(p)
549 elif os.path.isdir(p):
552 elif os.path.isdir(p):
550 for f, kind in osutil.listdir(p):
553 for f, kind in osutil.listdir(p):
551 if f.endswith('.rc'):
554 if f.endswith('.rc'):
552 rcpath.append(os.path.join(p, f))
555 rcpath.append(os.path.join(p, f))
553 return rcpath
556 return rcpath
554
557
555 def userrcpath():
558 def userrcpath():
556 '''return os-specific hgrc search path to the user dir'''
559 '''return os-specific hgrc search path to the user dir'''
557 home = os.path.expanduser('~')
560 home = os.path.expanduser('~')
558 path = [os.path.join(home, 'mercurial.ini'),
561 path = [os.path.join(home, 'mercurial.ini'),
559 os.path.join(home, '.hgrc')]
562 os.path.join(home, '.hgrc')]
560 userprofile = os.environ.get('USERPROFILE')
563 userprofile = os.environ.get('USERPROFILE')
561 if userprofile:
564 if userprofile:
562 path.append(os.path.join(userprofile, 'mercurial.ini'))
565 path.append(os.path.join(userprofile, 'mercurial.ini'))
563 path.append(os.path.join(userprofile, '.hgrc'))
566 path.append(os.path.join(userprofile, '.hgrc'))
564 return path
567 return path
565
568
566 def revsingle(repo, revspec, default='.'):
569 def revsingle(repo, revspec, default='.'):
567 if not revspec:
570 if not revspec:
568 return repo[default]
571 return repo[default]
569
572
570 l = revrange(repo, [revspec])
573 l = revrange(repo, [revspec])
571 if len(l) < 1:
574 if len(l) < 1:
572 raise util.Abort(_('empty revision set'))
575 raise util.Abort(_('empty revision set'))
573 return repo[l[-1]]
576 return repo[l[-1]]
574
577
575 def revpair(repo, revs):
578 def revpair(repo, revs):
576 if not revs:
579 if not revs:
577 return repo.dirstate.p1(), None
580 return repo.dirstate.p1(), None
578
581
579 l = revrange(repo, revs)
582 l = revrange(repo, revs)
580
583
581 if len(l) == 0:
584 if len(l) == 0:
582 if revs:
585 if revs:
583 raise util.Abort(_('empty revision range'))
586 raise util.Abort(_('empty revision range'))
584 return repo.dirstate.p1(), None
587 return repo.dirstate.p1(), None
585
588
586 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
589 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
587 return repo.lookup(l[0]), None
590 return repo.lookup(l[0]), None
588
591
589 return repo.lookup(l[0]), repo.lookup(l[-1])
592 return repo.lookup(l[0]), repo.lookup(l[-1])
590
593
591 _revrangesep = ':'
594 _revrangesep = ':'
592
595
593 def revrange(repo, revs):
596 def revrange(repo, revs):
594 """Yield revision as strings from a list of revision specifications."""
597 """Yield revision as strings from a list of revision specifications."""
595
598
596 def revfix(repo, val, defval):
599 def revfix(repo, val, defval):
597 if not val and val != 0 and defval is not None:
600 if not val and val != 0 and defval is not None:
598 return defval
601 return defval
599 return repo[val].rev()
602 return repo[val].rev()
600
603
601 seen, l = set(), []
604 seen, l = set(), []
602 for spec in revs:
605 for spec in revs:
603 if l and not seen:
606 if l and not seen:
604 seen = set(l)
607 seen = set(l)
605 # attempt to parse old-style ranges first to deal with
608 # attempt to parse old-style ranges first to deal with
606 # things like old-tag which contain query metacharacters
609 # things like old-tag which contain query metacharacters
607 try:
610 try:
608 if isinstance(spec, int):
611 if isinstance(spec, int):
609 seen.add(spec)
612 seen.add(spec)
610 l.append(spec)
613 l.append(spec)
611 continue
614 continue
612
615
613 if _revrangesep in spec:
616 if _revrangesep in spec:
614 start, end = spec.split(_revrangesep, 1)
617 start, end = spec.split(_revrangesep, 1)
615 start = revfix(repo, start, 0)
618 start = revfix(repo, start, 0)
616 end = revfix(repo, end, len(repo) - 1)
619 end = revfix(repo, end, len(repo) - 1)
617 step = start > end and -1 or 1
620 step = start > end and -1 or 1
618 if not seen and not l:
621 if not seen and not l:
619 # by far the most common case: revs = ["-1:0"]
622 # by far the most common case: revs = ["-1:0"]
620 l = range(start, end + step, step)
623 l = range(start, end + step, step)
621 # defer syncing seen until next iteration
624 # defer syncing seen until next iteration
622 continue
625 continue
623 newrevs = set(xrange(start, end + step, step))
626 newrevs = set(xrange(start, end + step, step))
624 if seen:
627 if seen:
625 newrevs.difference_update(seen)
628 newrevs.difference_update(seen)
626 seen.update(newrevs)
629 seen.update(newrevs)
627 else:
630 else:
628 seen = newrevs
631 seen = newrevs
629 l.extend(sorted(newrevs, reverse=start > end))
632 l.extend(sorted(newrevs, reverse=start > end))
630 continue
633 continue
631 elif spec and spec in repo: # single unquoted rev
634 elif spec and spec in repo: # single unquoted rev
632 rev = revfix(repo, spec, None)
635 rev = revfix(repo, spec, None)
633 if rev in seen:
636 if rev in seen:
634 continue
637 continue
635 seen.add(rev)
638 seen.add(rev)
636 l.append(rev)
639 l.append(rev)
637 continue
640 continue
638 except error.RepoLookupError:
641 except error.RepoLookupError:
639 pass
642 pass
640
643
641 # fall through to new-style queries if old-style fails
644 # fall through to new-style queries if old-style fails
642 m = revset.match(repo.ui, spec)
645 m = revset.match(repo.ui, spec)
643 dl = [r for r in m(repo, list(repo)) if r not in seen]
646 dl = [r for r in m(repo, list(repo)) if r not in seen]
644 l.extend(dl)
647 l.extend(dl)
645 seen.update(dl)
648 seen.update(dl)
646
649
647 return l
650 return l
648
651
649 def expandpats(pats):
652 def expandpats(pats):
650 if not util.expandglobs:
653 if not util.expandglobs:
651 return list(pats)
654 return list(pats)
652 ret = []
655 ret = []
653 for p in pats:
656 for p in pats:
654 kind, name = matchmod._patsplit(p, None)
657 kind, name = matchmod._patsplit(p, None)
655 if kind is None:
658 if kind is None:
656 try:
659 try:
657 globbed = glob.glob(name)
660 globbed = glob.glob(name)
658 except re.error:
661 except re.error:
659 globbed = [name]
662 globbed = [name]
660 if globbed:
663 if globbed:
661 ret.extend(globbed)
664 ret.extend(globbed)
662 continue
665 continue
663 ret.append(p)
666 ret.append(p)
664 return ret
667 return ret
665
668
666 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
669 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
667 if pats == ("",):
670 if pats == ("",):
668 pats = []
671 pats = []
669 if not globbed and default == 'relpath':
672 if not globbed and default == 'relpath':
670 pats = expandpats(pats or [])
673 pats = expandpats(pats or [])
671
674
672 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
675 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
673 default)
676 default)
674 def badfn(f, msg):
677 def badfn(f, msg):
675 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
678 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
676 m.bad = badfn
679 m.bad = badfn
677 return m, pats
680 return m, pats
678
681
679 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
682 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
680 return matchandpats(ctx, pats, opts, globbed, default)[0]
683 return matchandpats(ctx, pats, opts, globbed, default)[0]
681
684
682 def matchall(repo):
685 def matchall(repo):
683 return matchmod.always(repo.root, repo.getcwd())
686 return matchmod.always(repo.root, repo.getcwd())
684
687
685 def matchfiles(repo, files):
688 def matchfiles(repo, files):
686 return matchmod.exact(repo.root, repo.getcwd(), files)
689 return matchmod.exact(repo.root, repo.getcwd(), files)
687
690
688 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
691 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
689 if dry_run is None:
692 if dry_run is None:
690 dry_run = opts.get('dry_run')
693 dry_run = opts.get('dry_run')
691 if similarity is None:
694 if similarity is None:
692 similarity = float(opts.get('similarity') or 0)
695 similarity = float(opts.get('similarity') or 0)
693 # we'd use status here, except handling of symlinks and ignore is tricky
696 # we'd use status here, except handling of symlinks and ignore is tricky
694 added, unknown, deleted, removed = [], [], [], []
697 added, unknown, deleted, removed = [], [], [], []
695 audit_path = pathauditor(repo.root)
698 audit_path = pathauditor(repo.root)
696 m = match(repo[None], pats, opts)
699 m = match(repo[None], pats, opts)
697 rejected = []
700 rejected = []
698 m.bad = lambda x, y: rejected.append(x)
701 m.bad = lambda x, y: rejected.append(x)
699
702
700 for abs in repo.walk(m):
703 for abs in repo.walk(m):
701 target = repo.wjoin(abs)
704 target = repo.wjoin(abs)
702 good = True
705 good = True
703 try:
706 try:
704 audit_path(abs)
707 audit_path(abs)
705 except (OSError, util.Abort):
708 except (OSError, util.Abort):
706 good = False
709 good = False
707 rel = m.rel(abs)
710 rel = m.rel(abs)
708 exact = m.exact(abs)
711 exact = m.exact(abs)
709 if good and abs not in repo.dirstate:
712 if good and abs not in repo.dirstate:
710 unknown.append(abs)
713 unknown.append(abs)
711 if repo.ui.verbose or not exact:
714 if repo.ui.verbose or not exact:
712 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
715 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
713 elif (repo.dirstate[abs] != 'r' and
716 elif (repo.dirstate[abs] != 'r' and
714 (not good or not os.path.lexists(target) or
717 (not good or not os.path.lexists(target) or
715 (os.path.isdir(target) and not os.path.islink(target)))):
718 (os.path.isdir(target) and not os.path.islink(target)))):
716 deleted.append(abs)
719 deleted.append(abs)
717 if repo.ui.verbose or not exact:
720 if repo.ui.verbose or not exact:
718 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
721 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
719 # for finding renames
722 # for finding renames
720 elif repo.dirstate[abs] == 'r':
723 elif repo.dirstate[abs] == 'r':
721 removed.append(abs)
724 removed.append(abs)
722 elif repo.dirstate[abs] == 'a':
725 elif repo.dirstate[abs] == 'a':
723 added.append(abs)
726 added.append(abs)
724 copies = {}
727 copies = {}
725 if similarity > 0:
728 if similarity > 0:
726 for old, new, score in similar.findrenames(repo,
729 for old, new, score in similar.findrenames(repo,
727 added + unknown, removed + deleted, similarity):
730 added + unknown, removed + deleted, similarity):
728 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
731 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
729 repo.ui.status(_('recording removal of %s as rename to %s '
732 repo.ui.status(_('recording removal of %s as rename to %s '
730 '(%d%% similar)\n') %
733 '(%d%% similar)\n') %
731 (m.rel(old), m.rel(new), score * 100))
734 (m.rel(old), m.rel(new), score * 100))
732 copies[new] = old
735 copies[new] = old
733
736
734 if not dry_run:
737 if not dry_run:
735 wctx = repo[None]
738 wctx = repo[None]
736 wlock = repo.wlock()
739 wlock = repo.wlock()
737 try:
740 try:
738 wctx.forget(deleted)
741 wctx.forget(deleted)
739 wctx.add(unknown)
742 wctx.add(unknown)
740 for new, old in copies.iteritems():
743 for new, old in copies.iteritems():
741 wctx.copy(old, new)
744 wctx.copy(old, new)
742 finally:
745 finally:
743 wlock.release()
746 wlock.release()
744
747
745 for f in rejected:
748 for f in rejected:
746 if f in m.files():
749 if f in m.files():
747 return 1
750 return 1
748 return 0
751 return 0
749
752
750 def updatedir(ui, repo, patches, similarity=0):
753 def updatedir(ui, repo, patches, similarity=0):
751 '''Update dirstate after patch application according to metadata'''
754 '''Update dirstate after patch application according to metadata'''
752 if not patches:
755 if not patches:
753 return []
756 return []
754 copies = []
757 copies = []
755 removes = set()
758 removes = set()
756 cfiles = patches.keys()
759 cfiles = patches.keys()
757 cwd = repo.getcwd()
760 cwd = repo.getcwd()
758 if cwd:
761 if cwd:
759 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
762 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
760 for f in patches:
763 for f in patches:
761 gp = patches[f]
764 gp = patches[f]
762 if not gp:
765 if not gp:
763 continue
766 continue
764 if gp.op == 'RENAME':
767 if gp.op == 'RENAME':
765 copies.append((gp.oldpath, gp.path))
768 copies.append((gp.oldpath, gp.path))
766 removes.add(gp.oldpath)
769 removes.add(gp.oldpath)
767 elif gp.op == 'COPY':
770 elif gp.op == 'COPY':
768 copies.append((gp.oldpath, gp.path))
771 copies.append((gp.oldpath, gp.path))
769 elif gp.op == 'DELETE':
772 elif gp.op == 'DELETE':
770 removes.add(gp.path)
773 removes.add(gp.path)
771
774
772 wctx = repo[None]
775 wctx = repo[None]
773 for src, dst in copies:
776 for src, dst in copies:
774 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
777 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
775 if (not similarity) and removes:
778 if (not similarity) and removes:
776 wctx.remove(sorted(removes), True)
779 wctx.remove(sorted(removes), True)
777
780
778 for f in patches:
781 for f in patches:
779 gp = patches[f]
782 gp = patches[f]
780 if gp and gp.mode:
783 if gp and gp.mode:
781 islink, isexec = gp.mode
784 islink, isexec = gp.mode
782 dst = repo.wjoin(gp.path)
785 dst = repo.wjoin(gp.path)
783 # patch won't create empty files
786 # patch won't create empty files
784 if gp.op == 'ADD' and not os.path.lexists(dst):
787 if gp.op == 'ADD' and not os.path.lexists(dst):
785 flags = (isexec and 'x' or '') + (islink and 'l' or '')
788 flags = (isexec and 'x' or '') + (islink and 'l' or '')
786 repo.wwrite(gp.path, '', flags)
789 repo.wwrite(gp.path, '', flags)
787 util.setflags(dst, islink, isexec)
790 util.setflags(dst, islink, isexec)
788 addremove(repo, cfiles, similarity=similarity)
791 addremove(repo, cfiles, similarity=similarity)
789 files = patches.keys()
792 files = patches.keys()
790 files.extend([r for r in removes if r not in files])
793 files.extend([r for r in removes if r not in files])
791 return sorted(files)
794 return sorted(files)
792
795
793 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
796 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
794 """Update the dirstate to reflect the intent of copying src to dst. For
797 """Update the dirstate to reflect the intent of copying src to dst. For
795 different reasons it might not end with dst being marked as copied from src.
798 different reasons it might not end with dst being marked as copied from src.
796 """
799 """
797 origsrc = repo.dirstate.copied(src) or src
800 origsrc = repo.dirstate.copied(src) or src
798 if dst == origsrc: # copying back a copy?
801 if dst == origsrc: # copying back a copy?
799 if repo.dirstate[dst] not in 'mn' and not dryrun:
802 if repo.dirstate[dst] not in 'mn' and not dryrun:
800 repo.dirstate.normallookup(dst)
803 repo.dirstate.normallookup(dst)
801 else:
804 else:
802 if repo.dirstate[origsrc] == 'a' and origsrc == src:
805 if repo.dirstate[origsrc] == 'a' and origsrc == src:
803 if not ui.quiet:
806 if not ui.quiet:
804 ui.warn(_("%s has not been committed yet, so no copy "
807 ui.warn(_("%s has not been committed yet, so no copy "
805 "data will be stored for %s.\n")
808 "data will be stored for %s.\n")
806 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
809 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
807 if repo.dirstate[dst] in '?r' and not dryrun:
810 if repo.dirstate[dst] in '?r' and not dryrun:
808 wctx.add([dst])
811 wctx.add([dst])
809 elif not dryrun:
812 elif not dryrun:
810 wctx.copy(origsrc, dst)
813 wctx.copy(origsrc, dst)
811
814
812 def readrequires(opener, supported):
815 def readrequires(opener, supported):
813 '''Reads and parses .hg/requires and checks if all entries found
816 '''Reads and parses .hg/requires and checks if all entries found
814 are in the list of supported features.'''
817 are in the list of supported features.'''
815 requirements = set(opener.read("requires").splitlines())
818 requirements = set(opener.read("requires").splitlines())
816 missings = []
819 missings = []
817 for r in requirements:
820 for r in requirements:
818 if r not in supported:
821 if r not in supported:
819 if not r or not r[0].isalnum():
822 if not r or not r[0].isalnum():
820 raise error.RequirementError(_(".hg/requires file is corrupt"))
823 raise error.RequirementError(_(".hg/requires file is corrupt"))
821 missings.append(r)
824 missings.append(r)
822 missings.sort()
825 missings.sort()
823 if missings:
826 if missings:
824 raise error.RequirementError(
827 raise error.RequirementError(
825 _("unknown repository format: requires features '%s' (upgrade "
828 _("unknown repository format: requires features '%s' (upgrade "
826 "Mercurial)") % "', '".join(missings))
829 "Mercurial)") % "', '".join(missings))
827 return requirements
830 return requirements
828
831
829 class filecacheentry(object):
832 class filecacheentry(object):
830 def __init__(self, path):
833 def __init__(self, path):
831 self.path = path
834 self.path = path
832 self.cachestat = filecacheentry.stat(self.path)
835 self.cachestat = filecacheentry.stat(self.path)
833
836
834 if self.cachestat:
837 if self.cachestat:
835 self._cacheable = self.cachestat.cacheable()
838 self._cacheable = self.cachestat.cacheable()
836 else:
839 else:
837 # None means we don't know yet
840 # None means we don't know yet
838 self._cacheable = None
841 self._cacheable = None
839
842
840 def refresh(self):
843 def refresh(self):
841 if self.cacheable():
844 if self.cacheable():
842 self.cachestat = filecacheentry.stat(self.path)
845 self.cachestat = filecacheentry.stat(self.path)
843
846
844 def cacheable(self):
847 def cacheable(self):
845 if self._cacheable is not None:
848 if self._cacheable is not None:
846 return self._cacheable
849 return self._cacheable
847
850
848 # we don't know yet, assume it is for now
851 # we don't know yet, assume it is for now
849 return True
852 return True
850
853
851 def changed(self):
854 def changed(self):
852 # no point in going further if we can't cache it
855 # no point in going further if we can't cache it
853 if not self.cacheable():
856 if not self.cacheable():
854 return True
857 return True
855
858
856 newstat = filecacheentry.stat(self.path)
859 newstat = filecacheentry.stat(self.path)
857
860
858 # we may not know if it's cacheable yet, check again now
861 # we may not know if it's cacheable yet, check again now
859 if newstat and self._cacheable is None:
862 if newstat and self._cacheable is None:
860 self._cacheable = newstat.cacheable()
863 self._cacheable = newstat.cacheable()
861
864
862 # check again
865 # check again
863 if not self._cacheable:
866 if not self._cacheable:
864 return True
867 return True
865
868
866 if self.cachestat != newstat:
869 if self.cachestat != newstat:
867 self.cachestat = newstat
870 self.cachestat = newstat
868 return True
871 return True
869 else:
872 else:
870 return False
873 return False
871
874
872 @staticmethod
875 @staticmethod
873 def stat(path):
876 def stat(path):
874 try:
877 try:
875 return util.cachestat(path)
878 return util.cachestat(path)
876 except OSError, e:
879 except OSError, e:
877 if e.errno != errno.ENOENT:
880 if e.errno != errno.ENOENT:
878 raise
881 raise
879
882
880 class filecache(object):
883 class filecache(object):
881 '''A property like decorator that tracks a file under .hg/ for updates.
884 '''A property like decorator that tracks a file under .hg/ for updates.
882
885
883 Records stat info when called in _filecache.
886 Records stat info when called in _filecache.
884
887
885 On subsequent calls, compares old stat info with new info, and recreates
888 On subsequent calls, compares old stat info with new info, and recreates
886 the object when needed, updating the new stat info in _filecache.
889 the object when needed, updating the new stat info in _filecache.
887
890
888 Mercurial either atomic renames or appends for files under .hg,
891 Mercurial either atomic renames or appends for files under .hg,
889 so to ensure the cache is reliable we need the filesystem to be able
892 so to ensure the cache is reliable we need the filesystem to be able
890 to tell us if a file has been replaced. If it can't, we fallback to
893 to tell us if a file has been replaced. If it can't, we fallback to
891 recreating the object on every call (essentially the same behaviour as
894 recreating the object on every call (essentially the same behaviour as
892 propertycache).'''
895 propertycache).'''
893 def __init__(self, path):
896 def __init__(self, path):
894 self.path = path
897 self.path = path
895
898
896 def join(self, obj, fname):
899 def join(self, obj, fname):
897 """Used to compute the runtime path of the cached file.
900 """Used to compute the runtime path of the cached file.
898
901
899 Users should subclass filecache and provide their own version of this
902 Users should subclass filecache and provide their own version of this
900 function to call the appropriate join function on 'obj' (an instance
903 function to call the appropriate join function on 'obj' (an instance
901 of the class that its member function was decorated).
904 of the class that its member function was decorated).
902 """
905 """
903 return obj.join(fname)
906 return obj.join(fname)
904
907
905 def __call__(self, func):
908 def __call__(self, func):
906 self.func = func
909 self.func = func
907 self.name = func.__name__
910 self.name = func.__name__
908 return self
911 return self
909
912
910 def __get__(self, obj, type=None):
913 def __get__(self, obj, type=None):
911 # do we need to check if the file changed?
914 # do we need to check if the file changed?
912 if self.name in obj.__dict__:
915 if self.name in obj.__dict__:
913 return obj.__dict__[self.name]
916 return obj.__dict__[self.name]
914
917
915 entry = obj._filecache.get(self.name)
918 entry = obj._filecache.get(self.name)
916
919
917 if entry:
920 if entry:
918 if entry.changed():
921 if entry.changed():
919 entry.obj = self.func(obj)
922 entry.obj = self.func(obj)
920 else:
923 else:
921 path = self.join(obj, self.path)
924 path = self.join(obj, self.path)
922
925
923 # We stat -before- creating the object so our cache doesn't lie if
926 # We stat -before- creating the object so our cache doesn't lie if
924 # a writer modified between the time we read and stat
927 # a writer modified between the time we read and stat
925 entry = filecacheentry(path)
928 entry = filecacheentry(path)
926 entry.obj = self.func(obj)
929 entry.obj = self.func(obj)
927
930
928 obj._filecache[self.name] = entry
931 obj._filecache[self.name] = entry
929
932
930 obj.__dict__[self.name] = entry.obj
933 obj.__dict__[self.name] = entry.obj
931 return entry.obj
934 return entry.obj
932
935
933 def __set__(self, obj, value):
936 def __set__(self, obj, value):
934 if self.name in obj._filecache:
937 if self.name in obj._filecache:
935 obj._filecache[self.name].obj = value # update cached copy
938 obj._filecache[self.name].obj = value # update cached copy
936 obj.__dict__[self.name] = value # update copy returned by obj.x
939 obj.__dict__[self.name] = value # update copy returned by obj.x
937
940
938 def __delete__(self, obj):
941 def __delete__(self, obj):
939 try:
942 try:
940 del obj.__dict__[self.name]
943 del obj.__dict__[self.name]
941 except KeyError:
944 except KeyError:
942 raise AttributeError, self.name
945 raise AttributeError, self.name
@@ -1,533 +1,534 @@
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, scmutil, util, parsers
9 import scmutil, util, parsers
10 import os, stat, errno
10 import os, stat, errno
11
11
12 _sha = util.sha1
12 _sha = util.sha1
13
13
14 # This avoids a collision between a file named foo and a dir named
14 # This avoids a collision between a file named foo and a dir named
15 # foo.i or foo.d
15 # foo.i or foo.d
16 def _encodedir(path):
16 def _encodedir(path):
17 '''
17 '''
18 >>> _encodedir('data/foo.i')
18 >>> _encodedir('data/foo.i')
19 'data/foo.i'
19 'data/foo.i'
20 >>> _encodedir('data/foo.i/bla.i')
20 >>> _encodedir('data/foo.i/bla.i')
21 'data/foo.i.hg/bla.i'
21 'data/foo.i.hg/bla.i'
22 >>> _encodedir('data/foo.i.hg/bla.i')
22 >>> _encodedir('data/foo.i.hg/bla.i')
23 'data/foo.i.hg.hg/bla.i'
23 'data/foo.i.hg.hg/bla.i'
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
26 '''
26 '''
27 return (path
27 return (path
28 .replace(".hg/", ".hg.hg/")
28 .replace(".hg/", ".hg.hg/")
29 .replace(".i/", ".i.hg/")
29 .replace(".i/", ".i.hg/")
30 .replace(".d/", ".d.hg/"))
30 .replace(".d/", ".d.hg/"))
31
31
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
33
33
34 def decodedir(path):
34 def decodedir(path):
35 '''
35 '''
36 >>> decodedir('data/foo.i')
36 >>> decodedir('data/foo.i')
37 'data/foo.i'
37 'data/foo.i'
38 >>> decodedir('data/foo.i.hg/bla.i')
38 >>> decodedir('data/foo.i.hg/bla.i')
39 'data/foo.i/bla.i'
39 'data/foo.i/bla.i'
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
41 'data/foo.i.hg/bla.i'
41 'data/foo.i.hg/bla.i'
42 '''
42 '''
43 if ".hg/" not in path:
43 if ".hg/" not in path:
44 return path
44 return path
45 return (path
45 return (path
46 .replace(".d.hg/", ".d/")
46 .replace(".d.hg/", ".d/")
47 .replace(".i.hg/", ".i/")
47 .replace(".i.hg/", ".i/")
48 .replace(".hg.hg/", ".hg/"))
48 .replace(".hg.hg/", ".hg/"))
49
49
50 def _buildencodefun():
50 def _buildencodefun():
51 '''
51 '''
52 >>> enc, dec = _buildencodefun()
52 >>> enc, dec = _buildencodefun()
53
53
54 >>> enc('nothing/special.txt')
54 >>> enc('nothing/special.txt')
55 'nothing/special.txt'
55 'nothing/special.txt'
56 >>> dec('nothing/special.txt')
56 >>> dec('nothing/special.txt')
57 'nothing/special.txt'
57 'nothing/special.txt'
58
58
59 >>> enc('HELLO')
59 >>> enc('HELLO')
60 '_h_e_l_l_o'
60 '_h_e_l_l_o'
61 >>> dec('_h_e_l_l_o')
61 >>> dec('_h_e_l_l_o')
62 'HELLO'
62 'HELLO'
63
63
64 >>> enc('hello:world?')
64 >>> enc('hello:world?')
65 'hello~3aworld~3f'
65 'hello~3aworld~3f'
66 >>> dec('hello~3aworld~3f')
66 >>> dec('hello~3aworld~3f')
67 'hello:world?'
67 'hello:world?'
68
68
69 >>> enc('the\x07quick\xADshot')
69 >>> enc('the\x07quick\xADshot')
70 'the~07quick~adshot'
70 'the~07quick~adshot'
71 >>> dec('the~07quick~adshot')
71 >>> dec('the~07quick~adshot')
72 'the\\x07quick\\xadshot'
72 'the\\x07quick\\xadshot'
73 '''
73 '''
74 e = '_'
74 e = '_'
75 winreserved = [ord(x) for x in '\\:*?"<>|']
75 winreserved = [ord(x) for x in '\\:*?"<>|']
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
77 for x in (range(32) + range(126, 256) + winreserved):
77 for x in (range(32) + range(126, 256) + winreserved):
78 cmap[chr(x)] = "~%02x" % x
78 cmap[chr(x)] = "~%02x" % x
79 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
79 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
80 cmap[chr(x)] = e + chr(x).lower()
80 cmap[chr(x)] = e + chr(x).lower()
81 dmap = {}
81 dmap = {}
82 for k, v in cmap.iteritems():
82 for k, v in cmap.iteritems():
83 dmap[v] = k
83 dmap[v] = k
84 def decode(s):
84 def decode(s):
85 i = 0
85 i = 0
86 while i < len(s):
86 while i < len(s):
87 for l in xrange(1, 4):
87 for l in xrange(1, 4):
88 try:
88 try:
89 yield dmap[s[i:i + l]]
89 yield dmap[s[i:i + l]]
90 i += l
90 i += l
91 break
91 break
92 except KeyError:
92 except KeyError:
93 pass
93 pass
94 else:
94 else:
95 raise KeyError
95 raise KeyError
96 return (lambda s: ''.join([cmap[c] for c in s]),
96 return (lambda s: ''.join([cmap[c] for c in s]),
97 lambda s: ''.join(list(decode(s))))
97 lambda s: ''.join(list(decode(s))))
98
98
99 _encodefname, _decodefname = _buildencodefun()
99 _encodefname, _decodefname = _buildencodefun()
100
100
101 def encodefilename(s):
101 def encodefilename(s):
102 '''
102 '''
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
105 '''
105 '''
106 return _encodefname(encodedir(s))
106 return _encodefname(encodedir(s))
107
107
108 def decodefilename(s):
108 def decodefilename(s):
109 '''
109 '''
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
112 '''
112 '''
113 return decodedir(_decodefname(s))
113 return decodedir(_decodefname(s))
114
114
115 def _buildlowerencodefun():
115 def _buildlowerencodefun():
116 '''
116 '''
117 >>> f = _buildlowerencodefun()
117 >>> f = _buildlowerencodefun()
118 >>> f('nothing/special.txt')
118 >>> f('nothing/special.txt')
119 'nothing/special.txt'
119 'nothing/special.txt'
120 >>> f('HELLO')
120 >>> f('HELLO')
121 'hello'
121 'hello'
122 >>> f('hello:world?')
122 >>> f('hello:world?')
123 'hello~3aworld~3f'
123 'hello~3aworld~3f'
124 >>> f('the\x07quick\xADshot')
124 >>> f('the\x07quick\xADshot')
125 'the~07quick~adshot'
125 'the~07quick~adshot'
126 '''
126 '''
127 winreserved = [ord(x) for x in '\\:*?"<>|']
127 winreserved = [ord(x) for x in '\\:*?"<>|']
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
129 for x in (range(32) + range(126, 256) + winreserved):
129 for x in (range(32) + range(126, 256) + winreserved):
130 cmap[chr(x)] = "~%02x" % x
130 cmap[chr(x)] = "~%02x" % x
131 for x in range(ord("A"), ord("Z")+1):
131 for x in range(ord("A"), ord("Z")+1):
132 cmap[chr(x)] = chr(x).lower()
132 cmap[chr(x)] = chr(x).lower()
133 return lambda s: "".join([cmap[c] for c in s])
133 return lambda s: "".join([cmap[c] for c in s])
134
134
135 lowerencode = _buildlowerencodefun()
135 lowerencode = _buildlowerencodefun()
136
136
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
140 def _auxencode(path, dotencode):
140 def _auxencode(path, dotencode):
141 '''
141 '''
142 Encodes filenames containing names reserved by Windows or which end in
142 Encodes filenames containing names reserved by Windows or which end in
143 period or space. Does not touch other single reserved characters c.
143 period or space. Does not touch other single reserved characters c.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
145 Additionally encodes space or period at the beginning, if dotencode is
145 Additionally encodes space or period at the beginning, if dotencode is
146 True. Parameter path is assumed to be all lowercase.
146 True. Parameter path is assumed to be all lowercase.
147 A segment only needs encoding if a reserved name appears as a
147 A segment only needs encoding if a reserved name appears as a
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
149 doesn't need encoding.
149 doesn't need encoding.
150
150
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
152 >>> _auxencode(s.split('/'), True)
152 >>> _auxencode(s.split('/'), True)
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
155 >>> _auxencode(s.split('/'), False)
155 >>> _auxencode(s.split('/'), False)
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
157 >>> _auxencode(['foo. '], True)
157 >>> _auxencode(['foo. '], True)
158 ['foo.~20']
158 ['foo.~20']
159 >>> _auxencode([' .foo'], True)
159 >>> _auxencode([' .foo'], True)
160 ['~20.foo']
160 ['~20.foo']
161 '''
161 '''
162 for i, n in enumerate(path):
162 for i, n in enumerate(path):
163 if not n:
163 if not n:
164 continue
164 continue
165 if dotencode and n[0] in '. ':
165 if dotencode and n[0] in '. ':
166 n = "~%02x" % ord(n[0]) + n[1:]
166 n = "~%02x" % ord(n[0]) + n[1:]
167 path[i] = n
167 path[i] = n
168 else:
168 else:
169 l = n.find('.')
169 l = n.find('.')
170 if l == -1:
170 if l == -1:
171 l = len(n)
171 l = len(n)
172 if ((l == 3 and n[:3] in _winres3) or
172 if ((l == 3 and n[:3] in _winres3) or
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
174 and n[:3] in _winres4)):
174 and n[:3] in _winres4)):
175 # encode third letter ('aux' -> 'au~78')
175 # encode third letter ('aux' -> 'au~78')
176 ec = "~%02x" % ord(n[2])
176 ec = "~%02x" % ord(n[2])
177 n = n[0:2] + ec + n[3:]
177 n = n[0:2] + ec + n[3:]
178 path[i] = n
178 path[i] = n
179 if n[-1] in '. ':
179 if n[-1] in '. ':
180 # encode last period or space ('foo...' -> 'foo..~2e')
180 # encode last period or space ('foo...' -> 'foo..~2e')
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
182 return path
182 return path
183
183
184 _maxstorepathlen = 120
184 _maxstorepathlen = 120
185 _dirprefixlen = 8
185 _dirprefixlen = 8
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
187
187
188 def _hashencode(path, dotencode):
188 def _hashencode(path, dotencode):
189 digest = _sha(path).hexdigest()
189 digest = _sha(path).hexdigest()
190 le = lowerencode(path).split('/')[1:]
190 le = lowerencode(path).split('/')[1:]
191 parts = _auxencode(le, dotencode)
191 parts = _auxencode(le, dotencode)
192 basename = parts[-1]
192 basename = parts[-1]
193 _root, ext = os.path.splitext(basename)
193 _root, ext = os.path.splitext(basename)
194 sdirs = []
194 sdirs = []
195 sdirslen = 0
195 sdirslen = 0
196 for p in parts[:-1]:
196 for p in parts[:-1]:
197 d = p[:_dirprefixlen]
197 d = p[:_dirprefixlen]
198 if d[-1] in '. ':
198 if d[-1] in '. ':
199 # Windows can't access dirs ending in period or space
199 # Windows can't access dirs ending in period or space
200 d = d[:-1] + '_'
200 d = d[:-1] + '_'
201 if sdirslen == 0:
201 if sdirslen == 0:
202 t = len(d)
202 t = len(d)
203 else:
203 else:
204 t = sdirslen + 1 + len(d)
204 t = sdirslen + 1 + len(d)
205 if t > _maxshortdirslen:
205 if t > _maxshortdirslen:
206 break
206 break
207 sdirs.append(d)
207 sdirs.append(d)
208 sdirslen = t
208 sdirslen = t
209 dirs = '/'.join(sdirs)
209 dirs = '/'.join(sdirs)
210 if len(dirs) > 0:
210 if len(dirs) > 0:
211 dirs += '/'
211 dirs += '/'
212 res = 'dh/' + dirs + digest + ext
212 res = 'dh/' + dirs + digest + ext
213 spaceleft = _maxstorepathlen - len(res)
213 spaceleft = _maxstorepathlen - len(res)
214 if spaceleft > 0:
214 if spaceleft > 0:
215 filler = basename[:spaceleft]
215 filler = basename[:spaceleft]
216 res = 'dh/' + dirs + filler + digest + ext
216 res = 'dh/' + dirs + filler + digest + ext
217 return res
217 return res
218
218
219 def _hybridencode(path, dotencode):
219 def _hybridencode(path, dotencode):
220 '''encodes path with a length limit
220 '''encodes path with a length limit
221
221
222 Encodes all paths that begin with 'data/', according to the following.
222 Encodes all paths that begin with 'data/', according to the following.
223
223
224 Default encoding (reversible):
224 Default encoding (reversible):
225
225
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
227 characters are encoded as '~xx', where xx is the two digit hex code
227 characters are encoded as '~xx', where xx is the two digit hex code
228 of the character (see encodefilename).
228 of the character (see encodefilename).
229 Relevant path components consisting of Windows reserved filenames are
229 Relevant path components consisting of Windows reserved filenames are
230 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
230 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
231
231
232 Hashed encoding (not reversible):
232 Hashed encoding (not reversible):
233
233
234 If the default-encoded path is longer than _maxstorepathlen, a
234 If the default-encoded path is longer than _maxstorepathlen, a
235 non-reversible hybrid hashing of the path is done instead.
235 non-reversible hybrid hashing of the path is done instead.
236 This encoding uses up to _dirprefixlen characters of all directory
236 This encoding uses up to _dirprefixlen characters of all directory
237 levels of the lowerencoded path, but not more levels than can fit into
237 levels of the lowerencoded path, but not more levels than can fit into
238 _maxshortdirslen.
238 _maxshortdirslen.
239 Then follows the filler followed by the sha digest of the full path.
239 Then follows the filler followed by the sha digest of the full path.
240 The filler is the beginning of the basename of the lowerencoded path
240 The filler is the beginning of the basename of the lowerencoded path
241 (the basename is everything after the last path separator). The filler
241 (the basename is everything after the last path separator). The filler
242 is as long as possible, filling in characters from the basename until
242 is as long as possible, filling in characters from the basename until
243 the encoded path has _maxstorepathlen characters (or all chars of the
243 the encoded path has _maxstorepathlen characters (or all chars of the
244 basename have been taken).
244 basename have been taken).
245 The extension (e.g. '.i' or '.d') is preserved.
245 The extension (e.g. '.i' or '.d') is preserved.
246
246
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
248 encoding was used.
248 encoding was used.
249 '''
249 '''
250 path = encodedir(path)
250 path = encodedir(path)
251 ef = _encodefname(path).split('/')
251 ef = _encodefname(path).split('/')
252 res = '/'.join(_auxencode(ef, dotencode))
252 res = '/'.join(_auxencode(ef, dotencode))
253 if len(res) > _maxstorepathlen:
253 if len(res) > _maxstorepathlen:
254 res = _hashencode(path, dotencode)
254 res = _hashencode(path, dotencode)
255 return res
255 return res
256
256
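# Illustrative aside (hedged; reasoned from the rules above rather than
# captured from a doctest run, and not part of the upstream module):
#
#   _hybridencode('data/FOO.i', False)  => 'data/_f_o_o.i'  (reversible branch)
#   _hybridencode('data/aux.i', False)  => 'data/au~78.i'   (reserved name masked)
#
# Any path whose default encoding exceeds _maxstorepathlen (120) instead goes
# through _hashencode() and comes back as 'dh/...' with the SHA-1 digest of
# the dir-encoded path embedded in it.
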
def _pathencode(path):
    if len(path) > _maxstorepathlen:
        return None
    ef = _encodefname(encodedir(path)).split('/')
    res = '/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return None
    return res

_pathencode = getattr(parsers, 'pathencode', _pathencode)

def _dothybridencode(f):
    ef = _pathencode(f)
    if ef is None:
        return _hashencode(encodedir(f), True)
    return ef

def _plainhybridencode(f):
    return _hybridencode(f, False)

def _calcmode(vfs):
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0777 & ~util.umask) == (0777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode

_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')

class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
+            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
-                for f, kind, st in osutil.listdir(p, stat=True):
+                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

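    # Hedged aside (assumed context, not part of this file): looking
    # "readdir" up on self.rawvfs above, instead of calling osutil.listdir()
    # directly, lets a vfs implementation control how directory contents are
    # enumerated.  The vfs side is presumed to delegate to osutil.listdir()
    # roughly like this:
    #
    #     def readdir(self, path=None, stat=None, skip=None):
    #         return osutil.listdir(self.join(path), stat, skip)
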
    def datafiles(self):
        return self._walk('data', True)

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        # yield manifest before changelog
        for x in reversed(self._walk('', False)):
            yield x

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if os.path.exists(self.join(path + ".i")):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return os.path.exists(self.join(path))

class encodedstore(basicstore):
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for a, b, size in self._walk('data', True):
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])

class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %s') % (n + 1)
                    raise util.Abort(t)
        fp.close()

    def _write(self, files, atomictemp):
        fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
        if files:
            fp.write(encodedir('\n'.join(files) + '\n'))
        fp.close()
        self._dirty = False

    def rewrite(self, files):
        self._write(files, False)
        self.entries = set(files)

    def write(self):
        if self._dirty:
            self._write(self.entries, True)

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def __contains__(self, path):
        if self.entries is None:
            self._load()
        # Check for files (exact match)
        if path + ".i" in self.entries:
            return True
        # Now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        for e in self.entries:
            if e.startswith(path):
                return True
        return False

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)

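# Hedged illustration (assumed, not part of the upstream module): the
# 'fncache' file kept under .hg/store lists one entry per line for every
# data revlog that has been written, using the plain path (only the
# encodedir() directory-collision encoding is applied on disk), e.g.
#
#   data/README.i
#   data/sub dir/Some Module.py.i
#
# which is what lets fncachestore.datafiles() below enumerate files without
# having to reverse the non-reversible hashed encoding of on-disk names.
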
class _fncachevfs(scmutil.abstractvfs):
    def __init__(self, vfs, fnc, encode):
        self.vfs = vfs
        self.fncache = fnc
        self.encode = encode

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)

    def __call__(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rb') and path.startswith('data/'):
            self.fncache.add(path)
        return self.vfs(self.encode(path), mode, *args, **kw)

    def join(self, path):
        if path:
            return self.vfs.join(self.encode(path))
        else:
            return self.vfs.join(path)

class fncachestore(basicstore):
    def __init__(self, path, vfstype, dotencode):
        if dotencode:
            encode = _dothybridencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        rewrite = False
        existing = []
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
                existing.append(f)
            except OSError, err:
                if err.errno != errno.ENOENT:
                    raise
                # nonexistent entry
                rewrite = True
        if rewrite:
            # rewrite fncache to remove nonexistent entries
            # (may be caused by rollback / strip)
            self.fncache.rewrite(existing)

    def copylist(self):
        d = ('data dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self):
        self.fncache.write()

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        return path in self.fncache

def store(requirements, path, vfstype):
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, vfstype, 'dotencode' in requirements)
        return encodedstore(path, vfstype)
    return basicstore(path, vfstype)
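
For orientation, a hedged sketch of how this factory is typically driven by a repository's requirements file; the scmutil.opener vfs factory and the repository path below are illustrative assumptions, not taken from this change:

# illustrative only -- assumes scmutil.opener is usable as the vfs factory
requirements = set(['revlogv1', 'store', 'fncache', 'dotencode'])
s = store(requirements, '/path/to/repo/.hg', scmutil.opener)

# with 'store' and 'fncache' present this is an fncachestore, so names are
# hybrid-encoded under .hg/store and new data files get registered in fncache
print s.join('data/Some File.TXT.i')
for unencoded, encoded, size in s.walk():
    print unencoded, encoded, size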