store: invoke "os.stat()" for "createmode" initialization via vfs...
Author: FUJIWARA Katsunori
Changeset: r17726:7cb7e17c (branch: default)
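
The change adds a small stat() helper to abstractvfs (new lines 222-224 in the scmutil.py hunk below), so store.py can obtain the mode bits used to initialize createmode through the vfs instead of calling os.stat() on a hand-built path. The store.py hunk that makes that call is not part of this excerpt; the sketch below only illustrates the idea, and calcmode plus the umask handling are hypothetical stand-ins, not the actual Mercurial helpers.

import os

def calcmode(vfs):
    # Hypothetical sketch: initialize createmode by statting the vfs base
    # directory itself (path=None) through vfs.stat(), rather than calling
    # os.stat() on a manually joined path.
    try:
        mode = vfs.stat().st_mode
        umask = os.umask(0)
        os.umask(umask)                 # read back the process umask
        if (0777 & ~umask) == (0777 & mode):
            mode = None                 # default mode; later chmods are pointless
    except OSError:
        mode = None
    return mode

With the vfs class defined in the hunk below, this could be driven as calcmode(scmutil.vfs('/path/to/repo/.hg/store')); the path is shown for illustration only.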
mercurial/scmutil.py: @@ -1,939 +1,942 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding, phases
9 import util, error, osutil, revset, similar, encoding, phases
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, repo, excluded=None):
13 def nochangesfound(ui, repo, excluded=None):
14 '''Report no changes for push/pull, excluded is None or a list of
14 '''Report no changes for push/pull, excluded is None or a list of
15 nodes excluded from the push/pull.
15 nodes excluded from the push/pull.
16 '''
16 '''
17 secretlist = []
17 secretlist = []
18 if excluded:
18 if excluded:
19 for n in excluded:
19 for n in excluded:
20 ctx = repo[n]
20 ctx = repo[n]
21 if ctx.phase() >= phases.secret and not ctx.extinct():
21 if ctx.phase() >= phases.secret and not ctx.extinct():
22 secretlist.append(n)
22 secretlist.append(n)
23
23
24 if secretlist:
24 if secretlist:
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
26 % len(secretlist))
26 % len(secretlist))
27 else:
27 else:
28 ui.status(_("no changes found\n"))
28 ui.status(_("no changes found\n"))
29
29
30 def checkfilename(f):
30 def checkfilename(f):
31 '''Check that the filename f is an acceptable filename for a tracked file'''
31 '''Check that the filename f is an acceptable filename for a tracked file'''
32 if '\r' in f or '\n' in f:
32 if '\r' in f or '\n' in f:
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
34
34
35 def checkportable(ui, f):
35 def checkportable(ui, f):
36 '''Check if filename f is portable and warn or abort depending on config'''
36 '''Check if filename f is portable and warn or abort depending on config'''
37 checkfilename(f)
37 checkfilename(f)
38 abort, warn = checkportabilityalert(ui)
38 abort, warn = checkportabilityalert(ui)
39 if abort or warn:
39 if abort or warn:
40 msg = util.checkwinfilename(f)
40 msg = util.checkwinfilename(f)
41 if msg:
41 if msg:
42 msg = "%s: %r" % (msg, f)
42 msg = "%s: %r" % (msg, f)
43 if abort:
43 if abort:
44 raise util.Abort(msg)
44 raise util.Abort(msg)
45 ui.warn(_("warning: %s\n") % msg)
45 ui.warn(_("warning: %s\n") % msg)
46
46
47 def checkportabilityalert(ui):
47 def checkportabilityalert(ui):
48 '''check if the user's config requests nothing, a warning, or abort for
48 '''check if the user's config requests nothing, a warning, or abort for
49 non-portable filenames'''
49 non-portable filenames'''
50 val = ui.config('ui', 'portablefilenames', 'warn')
50 val = ui.config('ui', 'portablefilenames', 'warn')
51 lval = val.lower()
51 lval = val.lower()
52 bval = util.parsebool(val)
52 bval = util.parsebool(val)
53 abort = os.name == 'nt' or lval == 'abort'
53 abort = os.name == 'nt' or lval == 'abort'
54 warn = bval or lval == 'warn'
54 warn = bval or lval == 'warn'
55 if bval is None and not (warn or abort or lval == 'ignore'):
55 if bval is None and not (warn or abort or lval == 'ignore'):
56 raise error.ConfigError(
56 raise error.ConfigError(
57 _("ui.portablefilenames value is invalid ('%s')") % val)
57 _("ui.portablefilenames value is invalid ('%s')") % val)
58 return abort, warn
58 return abort, warn
59
59
60 class casecollisionauditor(object):
60 class casecollisionauditor(object):
61 def __init__(self, ui, abort, dirstate):
61 def __init__(self, ui, abort, dirstate):
62 self._ui = ui
62 self._ui = ui
63 self._abort = abort
63 self._abort = abort
64 allfiles = '\0'.join(dirstate._map)
64 allfiles = '\0'.join(dirstate._map)
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
66 self._dirstate = dirstate
66 self._dirstate = dirstate
67 # The purpose of _newfiles is so that we don't complain about
67 # The purpose of _newfiles is so that we don't complain about
68 # case collisions if someone were to call this object with the
68 # case collisions if someone were to call this object with the
69 # same filename twice.
69 # same filename twice.
70 self._newfiles = set()
70 self._newfiles = set()
71
71
72 def __call__(self, f):
72 def __call__(self, f):
73 fl = encoding.lower(f)
73 fl = encoding.lower(f)
74 if (fl in self._loweredfiles and f not in self._dirstate and
74 if (fl in self._loweredfiles and f not in self._dirstate and
75 f not in self._newfiles):
75 f not in self._newfiles):
76 msg = _('possible case-folding collision for %s') % f
76 msg = _('possible case-folding collision for %s') % f
77 if self._abort:
77 if self._abort:
78 raise util.Abort(msg)
78 raise util.Abort(msg)
79 self._ui.warn(_("warning: %s\n") % msg)
79 self._ui.warn(_("warning: %s\n") % msg)
80 self._loweredfiles.add(fl)
80 self._loweredfiles.add(fl)
81 self._newfiles.add(f)
81 self._newfiles.add(f)
82
82
83 class pathauditor(object):
83 class pathauditor(object):
84 '''ensure that a filesystem path contains no banned components.
84 '''ensure that a filesystem path contains no banned components.
85 the following properties of a path are checked:
85 the following properties of a path are checked:
86
86
87 - ends with a directory separator
87 - ends with a directory separator
88 - under top-level .hg
88 - under top-level .hg
89 - starts at the root of a windows drive
89 - starts at the root of a windows drive
90 - contains ".."
90 - contains ".."
91 - traverses a symlink (e.g. a/symlink_here/b)
91 - traverses a symlink (e.g. a/symlink_here/b)
92 - inside a nested repository (a callback can be used to approve
92 - inside a nested repository (a callback can be used to approve
93 some nested repositories, e.g., subrepositories)
93 some nested repositories, e.g., subrepositories)
94 '''
94 '''
95
95
96 def __init__(self, root, callback=None):
96 def __init__(self, root, callback=None):
97 self.audited = set()
97 self.audited = set()
98 self.auditeddir = set()
98 self.auditeddir = set()
99 self.root = root
99 self.root = root
100 self.callback = callback
100 self.callback = callback
101 if os.path.lexists(root) and not util.checkcase(root):
101 if os.path.lexists(root) and not util.checkcase(root):
102 self.normcase = util.normcase
102 self.normcase = util.normcase
103 else:
103 else:
104 self.normcase = lambda x: x
104 self.normcase = lambda x: x
105
105
106 def __call__(self, path):
106 def __call__(self, path):
107 '''Check the relative path.
107 '''Check the relative path.
108 path may contain a pattern (e.g. foodir/**.txt)'''
108 path may contain a pattern (e.g. foodir/**.txt)'''
109
109
110 path = util.localpath(path)
110 path = util.localpath(path)
111 normpath = self.normcase(path)
111 normpath = self.normcase(path)
112 if normpath in self.audited:
112 if normpath in self.audited:
113 return
113 return
114 # AIX ignores "/" at end of path, others raise EISDIR.
114 # AIX ignores "/" at end of path, others raise EISDIR.
115 if util.endswithsep(path):
115 if util.endswithsep(path):
116 raise util.Abort(_("path ends in directory separator: %s") % path)
116 raise util.Abort(_("path ends in directory separator: %s") % path)
117 parts = util.splitpath(path)
117 parts = util.splitpath(path)
118 if (os.path.splitdrive(path)[0]
118 if (os.path.splitdrive(path)[0]
119 or parts[0].lower() in ('.hg', '.hg.', '')
119 or parts[0].lower() in ('.hg', '.hg.', '')
120 or os.pardir in parts):
120 or os.pardir in parts):
121 raise util.Abort(_("path contains illegal component: %s") % path)
121 raise util.Abort(_("path contains illegal component: %s") % path)
122 if '.hg' in path.lower():
122 if '.hg' in path.lower():
123 lparts = [p.lower() for p in parts]
123 lparts = [p.lower() for p in parts]
124 for p in '.hg', '.hg.':
124 for p in '.hg', '.hg.':
125 if p in lparts[1:]:
125 if p in lparts[1:]:
126 pos = lparts.index(p)
126 pos = lparts.index(p)
127 base = os.path.join(*parts[:pos])
127 base = os.path.join(*parts[:pos])
128 raise util.Abort(_("path '%s' is inside nested repo %r")
128 raise util.Abort(_("path '%s' is inside nested repo %r")
129 % (path, base))
129 % (path, base))
130
130
131 normparts = util.splitpath(normpath)
131 normparts = util.splitpath(normpath)
132 assert len(parts) == len(normparts)
132 assert len(parts) == len(normparts)
133
133
134 parts.pop()
134 parts.pop()
135 normparts.pop()
135 normparts.pop()
136 prefixes = []
136 prefixes = []
137 while parts:
137 while parts:
138 prefix = os.sep.join(parts)
138 prefix = os.sep.join(parts)
139 normprefix = os.sep.join(normparts)
139 normprefix = os.sep.join(normparts)
140 if normprefix in self.auditeddir:
140 if normprefix in self.auditeddir:
141 break
141 break
142 curpath = os.path.join(self.root, prefix)
142 curpath = os.path.join(self.root, prefix)
143 try:
143 try:
144 st = os.lstat(curpath)
144 st = os.lstat(curpath)
145 except OSError, err:
145 except OSError, err:
146 # EINVAL can be raised as invalid path syntax under win32.
146 # EINVAL can be raised as invalid path syntax under win32.
147 # They must be ignored for patterns can be checked too.
147 # They must be ignored for patterns can be checked too.
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
149 raise
149 raise
150 else:
150 else:
151 if stat.S_ISLNK(st.st_mode):
151 if stat.S_ISLNK(st.st_mode):
152 raise util.Abort(
152 raise util.Abort(
153 _('path %r traverses symbolic link %r')
153 _('path %r traverses symbolic link %r')
154 % (path, prefix))
154 % (path, prefix))
155 elif (stat.S_ISDIR(st.st_mode) and
155 elif (stat.S_ISDIR(st.st_mode) and
156 os.path.isdir(os.path.join(curpath, '.hg'))):
156 os.path.isdir(os.path.join(curpath, '.hg'))):
157 if not self.callback or not self.callback(curpath):
157 if not self.callback or not self.callback(curpath):
158 raise util.Abort(_("path '%s' is inside nested "
158 raise util.Abort(_("path '%s' is inside nested "
159 "repo %r")
159 "repo %r")
160 % (path, prefix))
160 % (path, prefix))
161 prefixes.append(normprefix)
161 prefixes.append(normprefix)
162 parts.pop()
162 parts.pop()
163 normparts.pop()
163 normparts.pop()
164
164
165 self.audited.add(normpath)
165 self.audited.add(normpath)
166 # only add prefixes to the cache after checking everything: we don't
166 # only add prefixes to the cache after checking everything: we don't
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
168 self.auditeddir.update(prefixes)
168 self.auditeddir.update(prefixes)
169
169
170 class abstractvfs(object):
170 class abstractvfs(object):
171 """Abstract base class; cannot be instantiated"""
171 """Abstract base class; cannot be instantiated"""
172
172
173 def __init__(self, *args, **kwargs):
173 def __init__(self, *args, **kwargs):
174 '''Prevent instantiation; don't call this from subclasses.'''
174 '''Prevent instantiation; don't call this from subclasses.'''
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
176
176
177 def tryread(self, path):
177 def tryread(self, path):
178 '''gracefully return an empty string for missing files'''
178 '''gracefully return an empty string for missing files'''
179 try:
179 try:
180 return self.read(path)
180 return self.read(path)
181 except IOError, inst:
181 except IOError, inst:
182 if inst.errno != errno.ENOENT:
182 if inst.errno != errno.ENOENT:
183 raise
183 raise
184 return ""
184 return ""
185
185
186 def read(self, path):
186 def read(self, path):
187 fp = self(path, 'rb')
187 fp = self(path, 'rb')
188 try:
188 try:
189 return fp.read()
189 return fp.read()
190 finally:
190 finally:
191 fp.close()
191 fp.close()
192
192
193 def write(self, path, data):
193 def write(self, path, data):
194 fp = self(path, 'wb')
194 fp = self(path, 'wb')
195 try:
195 try:
196 return fp.write(data)
196 return fp.write(data)
197 finally:
197 finally:
198 fp.close()
198 fp.close()
199
199
200 def append(self, path, data):
200 def append(self, path, data):
201 fp = self(path, 'ab')
201 fp = self(path, 'ab')
202 try:
202 try:
203 return fp.write(data)
203 return fp.write(data)
204 finally:
204 finally:
205 fp.close()
205 fp.close()
206
206
207 def exists(self, path=None):
207 def exists(self, path=None):
208 return os.path.exists(self.join(path))
208 return os.path.exists(self.join(path))
209
209
210 def isdir(self, path=None):
210 def isdir(self, path=None):
211 return os.path.isdir(self.join(path))
211 return os.path.isdir(self.join(path))
212
212
213 def makedir(self, path=None, notindexed=True):
213 def makedir(self, path=None, notindexed=True):
214 return util.makedir(self.join(path), notindexed)
214 return util.makedir(self.join(path), notindexed)
215
215
216 def makedirs(self, path=None, mode=None):
216 def makedirs(self, path=None, mode=None):
217 return util.makedirs(self.join(path), mode)
217 return util.makedirs(self.join(path), mode)
218
218
219 def mkdir(self, path=None):
219 def mkdir(self, path=None):
220 return os.mkdir(self.join(path))
220 return os.mkdir(self.join(path))
221
221
222 def stat(self, path=None):
223 return os.stat(self.join(path))
224
222 class vfs(abstractvfs):
225 class vfs(abstractvfs):
223 '''Operate files relative to a base directory
226 '''Operate files relative to a base directory
224
227
225 This class is used to hide the details of COW semantics and
228 This class is used to hide the details of COW semantics and
226 remote file access from higher level code.
229 remote file access from higher level code.
227 '''
230 '''
228 def __init__(self, base, audit=True, expand=False):
231 def __init__(self, base, audit=True, expand=False):
229 if expand:
232 if expand:
230 base = os.path.realpath(util.expandpath(base))
233 base = os.path.realpath(util.expandpath(base))
231 self.base = base
234 self.base = base
232 self._setmustaudit(audit)
235 self._setmustaudit(audit)
233 self.createmode = None
236 self.createmode = None
234 self._trustnlink = None
237 self._trustnlink = None
235
238
236 def _getmustaudit(self):
239 def _getmustaudit(self):
237 return self._audit
240 return self._audit
238
241
239 def _setmustaudit(self, onoff):
242 def _setmustaudit(self, onoff):
240 self._audit = onoff
243 self._audit = onoff
241 if onoff:
244 if onoff:
242 self.auditor = pathauditor(self.base)
245 self.auditor = pathauditor(self.base)
243 else:
246 else:
244 self.auditor = util.always
247 self.auditor = util.always
245
248
246 mustaudit = property(_getmustaudit, _setmustaudit)
249 mustaudit = property(_getmustaudit, _setmustaudit)
247
250
248 @util.propertycache
251 @util.propertycache
249 def _cansymlink(self):
252 def _cansymlink(self):
250 return util.checklink(self.base)
253 return util.checklink(self.base)
251
254
252 def _fixfilemode(self, name):
255 def _fixfilemode(self, name):
253 if self.createmode is None:
256 if self.createmode is None:
254 return
257 return
255 os.chmod(name, self.createmode & 0666)
258 os.chmod(name, self.createmode & 0666)
256
259
257 def __call__(self, path, mode="r", text=False, atomictemp=False):
260 def __call__(self, path, mode="r", text=False, atomictemp=False):
258 if self._audit:
261 if self._audit:
259 r = util.checkosfilename(path)
262 r = util.checkosfilename(path)
260 if r:
263 if r:
261 raise util.Abort("%s: %r" % (r, path))
264 raise util.Abort("%s: %r" % (r, path))
262 self.auditor(path)
265 self.auditor(path)
263 f = self.join(path)
266 f = self.join(path)
264
267
265 if not text and "b" not in mode:
268 if not text and "b" not in mode:
266 mode += "b" # for that other OS
269 mode += "b" # for that other OS
267
270
268 nlink = -1
271 nlink = -1
269 dirname, basename = util.split(f)
272 dirname, basename = util.split(f)
270 # If basename is empty, then the path is malformed because it points
273 # If basename is empty, then the path is malformed because it points
271 # to a directory. Let the posixfile() call below raise IOError.
274 # to a directory. Let the posixfile() call below raise IOError.
272 if basename and mode not in ('r', 'rb'):
275 if basename and mode not in ('r', 'rb'):
273 if atomictemp:
276 if atomictemp:
274 if not os.path.isdir(dirname):
277 if not os.path.isdir(dirname):
275 util.makedirs(dirname, self.createmode)
278 util.makedirs(dirname, self.createmode)
276 return util.atomictempfile(f, mode, self.createmode)
279 return util.atomictempfile(f, mode, self.createmode)
277 try:
280 try:
278 if 'w' in mode:
281 if 'w' in mode:
279 util.unlink(f)
282 util.unlink(f)
280 nlink = 0
283 nlink = 0
281 else:
284 else:
282 # nlinks() may behave differently for files on Windows
285 # nlinks() may behave differently for files on Windows
283 # shares if the file is open.
286 # shares if the file is open.
284 fd = util.posixfile(f)
287 fd = util.posixfile(f)
285 nlink = util.nlinks(f)
288 nlink = util.nlinks(f)
286 if nlink < 1:
289 if nlink < 1:
287 nlink = 2 # force mktempcopy (issue1922)
290 nlink = 2 # force mktempcopy (issue1922)
288 fd.close()
291 fd.close()
289 except (OSError, IOError), e:
292 except (OSError, IOError), e:
290 if e.errno != errno.ENOENT:
293 if e.errno != errno.ENOENT:
291 raise
294 raise
292 nlink = 0
295 nlink = 0
293 if not os.path.isdir(dirname):
296 if not os.path.isdir(dirname):
294 util.makedirs(dirname, self.createmode)
297 util.makedirs(dirname, self.createmode)
295 if nlink > 0:
298 if nlink > 0:
296 if self._trustnlink is None:
299 if self._trustnlink is None:
297 self._trustnlink = nlink > 1 or util.checknlink(f)
300 self._trustnlink = nlink > 1 or util.checknlink(f)
298 if nlink > 1 or not self._trustnlink:
301 if nlink > 1 or not self._trustnlink:
299 util.rename(util.mktempcopy(f), f)
302 util.rename(util.mktempcopy(f), f)
300 fp = util.posixfile(f, mode)
303 fp = util.posixfile(f, mode)
301 if nlink == 0:
304 if nlink == 0:
302 self._fixfilemode(f)
305 self._fixfilemode(f)
303 return fp
306 return fp
304
307
305 def symlink(self, src, dst):
308 def symlink(self, src, dst):
306 self.auditor(dst)
309 self.auditor(dst)
307 linkname = self.join(dst)
310 linkname = self.join(dst)
308 try:
311 try:
309 os.unlink(linkname)
312 os.unlink(linkname)
310 except OSError:
313 except OSError:
311 pass
314 pass
312
315
313 dirname = os.path.dirname(linkname)
316 dirname = os.path.dirname(linkname)
314 if not os.path.exists(dirname):
317 if not os.path.exists(dirname):
315 util.makedirs(dirname, self.createmode)
318 util.makedirs(dirname, self.createmode)
316
319
317 if self._cansymlink:
320 if self._cansymlink:
318 try:
321 try:
319 os.symlink(src, linkname)
322 os.symlink(src, linkname)
320 except OSError, err:
323 except OSError, err:
321 raise OSError(err.errno, _('could not symlink to %r: %s') %
324 raise OSError(err.errno, _('could not symlink to %r: %s') %
322 (src, err.strerror), linkname)
325 (src, err.strerror), linkname)
323 else:
326 else:
324 f = self(dst, "w")
327 f = self(dst, "w")
325 f.write(src)
328 f.write(src)
326 f.close()
329 f.close()
327 self._fixfilemode(dst)
330 self._fixfilemode(dst)
328
331
329 def audit(self, path):
332 def audit(self, path):
330 self.auditor(path)
333 self.auditor(path)
331
334
332 def join(self, path):
335 def join(self, path):
333 if path:
336 if path:
334 return os.path.join(self.base, path)
337 return os.path.join(self.base, path)
335 else:
338 else:
336 return self.base
339 return self.base
337
340
338 opener = vfs
341 opener = vfs
339
342
340 class filtervfs(abstractvfs):
343 class filtervfs(abstractvfs):
341 '''Wrapper vfs for filtering filenames with a function.'''
344 '''Wrapper vfs for filtering filenames with a function.'''
342
345
343 def __init__(self, opener, filter):
346 def __init__(self, opener, filter):
344 self._filter = filter
347 self._filter = filter
345 self._orig = opener
348 self._orig = opener
346
349
347 def __call__(self, path, *args, **kwargs):
350 def __call__(self, path, *args, **kwargs):
348 return self._orig(self._filter(path), *args, **kwargs)
351 return self._orig(self._filter(path), *args, **kwargs)
349
352
350 def join(self, path):
353 def join(self, path):
351 if path:
354 if path:
352 return self._orig.join(self._filter(path))
355 return self._orig.join(self._filter(path))
353 else:
356 else:
354 return self._orig.join(path)
357 return self._orig.join(path)
355
358
356 filteropener = filtervfs
359 filteropener = filtervfs
357
360
358 def canonpath(root, cwd, myname, auditor=None):
361 def canonpath(root, cwd, myname, auditor=None):
359 '''return the canonical path of myname, given cwd and root'''
362 '''return the canonical path of myname, given cwd and root'''
360 if util.endswithsep(root):
363 if util.endswithsep(root):
361 rootsep = root
364 rootsep = root
362 else:
365 else:
363 rootsep = root + os.sep
366 rootsep = root + os.sep
364 name = myname
367 name = myname
365 if not os.path.isabs(name):
368 if not os.path.isabs(name):
366 name = os.path.join(root, cwd, name)
369 name = os.path.join(root, cwd, name)
367 name = os.path.normpath(name)
370 name = os.path.normpath(name)
368 if auditor is None:
371 if auditor is None:
369 auditor = pathauditor(root)
372 auditor = pathauditor(root)
370 if name != rootsep and name.startswith(rootsep):
373 if name != rootsep and name.startswith(rootsep):
371 name = name[len(rootsep):]
374 name = name[len(rootsep):]
372 auditor(name)
375 auditor(name)
373 return util.pconvert(name)
376 return util.pconvert(name)
374 elif name == root:
377 elif name == root:
375 return ''
378 return ''
376 else:
379 else:
377 # Determine whether `name' is in the hierarchy at or beneath `root',
380 # Determine whether `name' is in the hierarchy at or beneath `root',
378 # by iterating name=dirname(name) until that causes no change (can't
381 # by iterating name=dirname(name) until that causes no change (can't
379 # check name == '/', because that doesn't work on windows). The list
382 # check name == '/', because that doesn't work on windows). The list
380 # `rel' holds the reversed list of components making up the relative
383 # `rel' holds the reversed list of components making up the relative
381 # file name we want.
384 # file name we want.
382 rel = []
385 rel = []
383 while True:
386 while True:
384 try:
387 try:
385 s = util.samefile(name, root)
388 s = util.samefile(name, root)
386 except OSError:
389 except OSError:
387 s = False
390 s = False
388 if s:
391 if s:
389 if not rel:
392 if not rel:
390 # name was actually the same as root (maybe a symlink)
393 # name was actually the same as root (maybe a symlink)
391 return ''
394 return ''
392 rel.reverse()
395 rel.reverse()
393 name = os.path.join(*rel)
396 name = os.path.join(*rel)
394 auditor(name)
397 auditor(name)
395 return util.pconvert(name)
398 return util.pconvert(name)
396 dirname, basename = util.split(name)
399 dirname, basename = util.split(name)
397 rel.append(basename)
400 rel.append(basename)
398 if dirname == name:
401 if dirname == name:
399 break
402 break
400 name = dirname
403 name = dirname
401
404
402 raise util.Abort('%s not under root' % myname)
405 raise util.Abort('%s not under root' % myname)
403
406
404 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
407 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
405 '''yield every hg repository under path, always recursively.
408 '''yield every hg repository under path, always recursively.
406 The recurse flag will only control recursion into repo working dirs'''
409 The recurse flag will only control recursion into repo working dirs'''
407 def errhandler(err):
410 def errhandler(err):
408 if err.filename == path:
411 if err.filename == path:
409 raise err
412 raise err
410 samestat = getattr(os.path, 'samestat', None)
413 samestat = getattr(os.path, 'samestat', None)
411 if followsym and samestat is not None:
414 if followsym and samestat is not None:
412 def adddir(dirlst, dirname):
415 def adddir(dirlst, dirname):
413 match = False
416 match = False
414 dirstat = os.stat(dirname)
417 dirstat = os.stat(dirname)
415 for lstdirstat in dirlst:
418 for lstdirstat in dirlst:
416 if samestat(dirstat, lstdirstat):
419 if samestat(dirstat, lstdirstat):
417 match = True
420 match = True
418 break
421 break
419 if not match:
422 if not match:
420 dirlst.append(dirstat)
423 dirlst.append(dirstat)
421 return not match
424 return not match
422 else:
425 else:
423 followsym = False
426 followsym = False
424
427
425 if (seen_dirs is None) and followsym:
428 if (seen_dirs is None) and followsym:
426 seen_dirs = []
429 seen_dirs = []
427 adddir(seen_dirs, path)
430 adddir(seen_dirs, path)
428 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
431 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
429 dirs.sort()
432 dirs.sort()
430 if '.hg' in dirs:
433 if '.hg' in dirs:
431 yield root # found a repository
434 yield root # found a repository
432 qroot = os.path.join(root, '.hg', 'patches')
435 qroot = os.path.join(root, '.hg', 'patches')
433 if os.path.isdir(os.path.join(qroot, '.hg')):
436 if os.path.isdir(os.path.join(qroot, '.hg')):
434 yield qroot # we have a patch queue repo here
437 yield qroot # we have a patch queue repo here
435 if recurse:
438 if recurse:
436 # avoid recursing inside the .hg directory
439 # avoid recursing inside the .hg directory
437 dirs.remove('.hg')
440 dirs.remove('.hg')
438 else:
441 else:
439 dirs[:] = [] # don't descend further
442 dirs[:] = [] # don't descend further
440 elif followsym:
443 elif followsym:
441 newdirs = []
444 newdirs = []
442 for d in dirs:
445 for d in dirs:
443 fname = os.path.join(root, d)
446 fname = os.path.join(root, d)
444 if adddir(seen_dirs, fname):
447 if adddir(seen_dirs, fname):
445 if os.path.islink(fname):
448 if os.path.islink(fname):
446 for hgname in walkrepos(fname, True, seen_dirs):
449 for hgname in walkrepos(fname, True, seen_dirs):
447 yield hgname
450 yield hgname
448 else:
451 else:
449 newdirs.append(d)
452 newdirs.append(d)
450 dirs[:] = newdirs
453 dirs[:] = newdirs
451
454
452 def osrcpath():
455 def osrcpath():
453 '''return default os-specific hgrc search path'''
456 '''return default os-specific hgrc search path'''
454 path = systemrcpath()
457 path = systemrcpath()
455 path.extend(userrcpath())
458 path.extend(userrcpath())
456 path = [os.path.normpath(f) for f in path]
459 path = [os.path.normpath(f) for f in path]
457 return path
460 return path
458
461
459 _rcpath = None
462 _rcpath = None
460
463
461 def rcpath():
464 def rcpath():
462 '''return hgrc search path. if env var HGRCPATH is set, use it.
465 '''return hgrc search path. if env var HGRCPATH is set, use it.
463 for each item in path, if directory, use files ending in .rc,
466 for each item in path, if directory, use files ending in .rc,
464 else use item.
467 else use item.
465 make HGRCPATH empty to only look in .hg/hgrc of current repo.
468 make HGRCPATH empty to only look in .hg/hgrc of current repo.
466 if no HGRCPATH, use default os-specific path.'''
469 if no HGRCPATH, use default os-specific path.'''
467 global _rcpath
470 global _rcpath
468 if _rcpath is None:
471 if _rcpath is None:
469 if 'HGRCPATH' in os.environ:
472 if 'HGRCPATH' in os.environ:
470 _rcpath = []
473 _rcpath = []
471 for p in os.environ['HGRCPATH'].split(os.pathsep):
474 for p in os.environ['HGRCPATH'].split(os.pathsep):
472 if not p:
475 if not p:
473 continue
476 continue
474 p = util.expandpath(p)
477 p = util.expandpath(p)
475 if os.path.isdir(p):
478 if os.path.isdir(p):
476 for f, kind in osutil.listdir(p):
479 for f, kind in osutil.listdir(p):
477 if f.endswith('.rc'):
480 if f.endswith('.rc'):
478 _rcpath.append(os.path.join(p, f))
481 _rcpath.append(os.path.join(p, f))
479 else:
482 else:
480 _rcpath.append(p)
483 _rcpath.append(p)
481 else:
484 else:
482 _rcpath = osrcpath()
485 _rcpath = osrcpath()
483 return _rcpath
486 return _rcpath
484
487
485 if os.name != 'nt':
488 if os.name != 'nt':
486
489
487 def rcfiles(path):
490 def rcfiles(path):
488 rcs = [os.path.join(path, 'hgrc')]
491 rcs = [os.path.join(path, 'hgrc')]
489 rcdir = os.path.join(path, 'hgrc.d')
492 rcdir = os.path.join(path, 'hgrc.d')
490 try:
493 try:
491 rcs.extend([os.path.join(rcdir, f)
494 rcs.extend([os.path.join(rcdir, f)
492 for f, kind in osutil.listdir(rcdir)
495 for f, kind in osutil.listdir(rcdir)
493 if f.endswith(".rc")])
496 if f.endswith(".rc")])
494 except OSError:
497 except OSError:
495 pass
498 pass
496 return rcs
499 return rcs
497
500
498 def systemrcpath():
501 def systemrcpath():
499 path = []
502 path = []
500 if sys.platform == 'plan9':
503 if sys.platform == 'plan9':
501 root = 'lib/mercurial'
504 root = 'lib/mercurial'
502 else:
505 else:
503 root = 'etc/mercurial'
506 root = 'etc/mercurial'
504 # old mod_python does not set sys.argv
507 # old mod_python does not set sys.argv
505 if len(getattr(sys, 'argv', [])) > 0:
508 if len(getattr(sys, 'argv', [])) > 0:
506 p = os.path.dirname(os.path.dirname(sys.argv[0]))
509 p = os.path.dirname(os.path.dirname(sys.argv[0]))
507 path.extend(rcfiles(os.path.join(p, root)))
510 path.extend(rcfiles(os.path.join(p, root)))
508 path.extend(rcfiles('/' + root))
511 path.extend(rcfiles('/' + root))
509 return path
512 return path
510
513
511 def userrcpath():
514 def userrcpath():
512 if sys.platform == 'plan9':
515 if sys.platform == 'plan9':
513 return [os.environ['home'] + '/lib/hgrc']
516 return [os.environ['home'] + '/lib/hgrc']
514 else:
517 else:
515 return [os.path.expanduser('~/.hgrc')]
518 return [os.path.expanduser('~/.hgrc')]
516
519
517 else:
520 else:
518
521
519 import _winreg
522 import _winreg
520
523
521 def systemrcpath():
524 def systemrcpath():
522 '''return default os-specific hgrc search path'''
525 '''return default os-specific hgrc search path'''
523 rcpath = []
526 rcpath = []
524 filename = util.executablepath()
527 filename = util.executablepath()
525 # Use mercurial.ini found in directory with hg.exe
528 # Use mercurial.ini found in directory with hg.exe
526 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
529 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
527 if os.path.isfile(progrc):
530 if os.path.isfile(progrc):
528 rcpath.append(progrc)
531 rcpath.append(progrc)
529 return rcpath
532 return rcpath
530 # Use hgrc.d found in directory with hg.exe
533 # Use hgrc.d found in directory with hg.exe
531 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
534 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
532 if os.path.isdir(progrcd):
535 if os.path.isdir(progrcd):
533 for f, kind in osutil.listdir(progrcd):
536 for f, kind in osutil.listdir(progrcd):
534 if f.endswith('.rc'):
537 if f.endswith('.rc'):
535 rcpath.append(os.path.join(progrcd, f))
538 rcpath.append(os.path.join(progrcd, f))
536 return rcpath
539 return rcpath
537 # else look for a system rcpath in the registry
540 # else look for a system rcpath in the registry
538 value = util.lookupreg('SOFTWARE\\Mercurial', None,
541 value = util.lookupreg('SOFTWARE\\Mercurial', None,
539 _winreg.HKEY_LOCAL_MACHINE)
542 _winreg.HKEY_LOCAL_MACHINE)
540 if not isinstance(value, str) or not value:
543 if not isinstance(value, str) or not value:
541 return rcpath
544 return rcpath
542 value = util.localpath(value)
545 value = util.localpath(value)
543 for p in value.split(os.pathsep):
546 for p in value.split(os.pathsep):
544 if p.lower().endswith('mercurial.ini'):
547 if p.lower().endswith('mercurial.ini'):
545 rcpath.append(p)
548 rcpath.append(p)
546 elif os.path.isdir(p):
549 elif os.path.isdir(p):
547 for f, kind in osutil.listdir(p):
550 for f, kind in osutil.listdir(p):
548 if f.endswith('.rc'):
551 if f.endswith('.rc'):
549 rcpath.append(os.path.join(p, f))
552 rcpath.append(os.path.join(p, f))
550 return rcpath
553 return rcpath
551
554
552 def userrcpath():
555 def userrcpath():
553 '''return os-specific hgrc search path to the user dir'''
556 '''return os-specific hgrc search path to the user dir'''
554 home = os.path.expanduser('~')
557 home = os.path.expanduser('~')
555 path = [os.path.join(home, 'mercurial.ini'),
558 path = [os.path.join(home, 'mercurial.ini'),
556 os.path.join(home, '.hgrc')]
559 os.path.join(home, '.hgrc')]
557 userprofile = os.environ.get('USERPROFILE')
560 userprofile = os.environ.get('USERPROFILE')
558 if userprofile:
561 if userprofile:
559 path.append(os.path.join(userprofile, 'mercurial.ini'))
562 path.append(os.path.join(userprofile, 'mercurial.ini'))
560 path.append(os.path.join(userprofile, '.hgrc'))
563 path.append(os.path.join(userprofile, '.hgrc'))
561 return path
564 return path
562
565
563 def revsingle(repo, revspec, default='.'):
566 def revsingle(repo, revspec, default='.'):
564 if not revspec:
567 if not revspec:
565 return repo[default]
568 return repo[default]
566
569
567 l = revrange(repo, [revspec])
570 l = revrange(repo, [revspec])
568 if len(l) < 1:
571 if len(l) < 1:
569 raise util.Abort(_('empty revision set'))
572 raise util.Abort(_('empty revision set'))
570 return repo[l[-1]]
573 return repo[l[-1]]
571
574
572 def revpair(repo, revs):
575 def revpair(repo, revs):
573 if not revs:
576 if not revs:
574 return repo.dirstate.p1(), None
577 return repo.dirstate.p1(), None
575
578
576 l = revrange(repo, revs)
579 l = revrange(repo, revs)
577
580
578 if len(l) == 0:
581 if len(l) == 0:
579 if revs:
582 if revs:
580 raise util.Abort(_('empty revision range'))
583 raise util.Abort(_('empty revision range'))
581 return repo.dirstate.p1(), None
584 return repo.dirstate.p1(), None
582
585
583 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
586 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
584 return repo.lookup(l[0]), None
587 return repo.lookup(l[0]), None
585
588
586 return repo.lookup(l[0]), repo.lookup(l[-1])
589 return repo.lookup(l[0]), repo.lookup(l[-1])
587
590
588 _revrangesep = ':'
591 _revrangesep = ':'
589
592
590 def revrange(repo, revs):
593 def revrange(repo, revs):
591 """Yield revision as strings from a list of revision specifications."""
594 """Yield revision as strings from a list of revision specifications."""
592
595
593 def revfix(repo, val, defval):
596 def revfix(repo, val, defval):
594 if not val and val != 0 and defval is not None:
597 if not val and val != 0 and defval is not None:
595 return defval
598 return defval
596 return repo[val].rev()
599 return repo[val].rev()
597
600
598 seen, l = set(), []
601 seen, l = set(), []
599 for spec in revs:
602 for spec in revs:
600 if l and not seen:
603 if l and not seen:
601 seen = set(l)
604 seen = set(l)
602 # attempt to parse old-style ranges first to deal with
605 # attempt to parse old-style ranges first to deal with
603 # things like old-tag which contain query metacharacters
606 # things like old-tag which contain query metacharacters
604 try:
607 try:
605 if isinstance(spec, int):
608 if isinstance(spec, int):
606 seen.add(spec)
609 seen.add(spec)
607 l.append(spec)
610 l.append(spec)
608 continue
611 continue
609
612
610 if _revrangesep in spec:
613 if _revrangesep in spec:
611 start, end = spec.split(_revrangesep, 1)
614 start, end = spec.split(_revrangesep, 1)
612 start = revfix(repo, start, 0)
615 start = revfix(repo, start, 0)
613 end = revfix(repo, end, len(repo) - 1)
616 end = revfix(repo, end, len(repo) - 1)
614 step = start > end and -1 or 1
617 step = start > end and -1 or 1
615 if not seen and not l:
618 if not seen and not l:
616 # by far the most common case: revs = ["-1:0"]
619 # by far the most common case: revs = ["-1:0"]
617 l = range(start, end + step, step)
620 l = range(start, end + step, step)
618 # defer syncing seen until next iteration
621 # defer syncing seen until next iteration
619 continue
622 continue
620 newrevs = set(xrange(start, end + step, step))
623 newrevs = set(xrange(start, end + step, step))
621 if seen:
624 if seen:
622 newrevs.difference_update(seen)
625 newrevs.difference_update(seen)
623 seen.update(newrevs)
626 seen.update(newrevs)
624 else:
627 else:
625 seen = newrevs
628 seen = newrevs
626 l.extend(sorted(newrevs, reverse=start > end))
629 l.extend(sorted(newrevs, reverse=start > end))
627 continue
630 continue
628 elif spec and spec in repo: # single unquoted rev
631 elif spec and spec in repo: # single unquoted rev
629 rev = revfix(repo, spec, None)
632 rev = revfix(repo, spec, None)
630 if rev in seen:
633 if rev in seen:
631 continue
634 continue
632 seen.add(rev)
635 seen.add(rev)
633 l.append(rev)
636 l.append(rev)
634 continue
637 continue
635 except error.RepoLookupError:
638 except error.RepoLookupError:
636 pass
639 pass
637
640
638 # fall through to new-style queries if old-style fails
641 # fall through to new-style queries if old-style fails
639 m = revset.match(repo.ui, spec)
642 m = revset.match(repo.ui, spec)
640 dl = [r for r in m(repo, list(repo)) if r not in seen]
643 dl = [r for r in m(repo, list(repo)) if r not in seen]
641 l.extend(dl)
644 l.extend(dl)
642 seen.update(dl)
645 seen.update(dl)
643
646
644 return l
647 return l
645
648
646 def expandpats(pats):
649 def expandpats(pats):
647 if not util.expandglobs:
650 if not util.expandglobs:
648 return list(pats)
651 return list(pats)
649 ret = []
652 ret = []
650 for p in pats:
653 for p in pats:
651 kind, name = matchmod._patsplit(p, None)
654 kind, name = matchmod._patsplit(p, None)
652 if kind is None:
655 if kind is None:
653 try:
656 try:
654 globbed = glob.glob(name)
657 globbed = glob.glob(name)
655 except re.error:
658 except re.error:
656 globbed = [name]
659 globbed = [name]
657 if globbed:
660 if globbed:
658 ret.extend(globbed)
661 ret.extend(globbed)
659 continue
662 continue
660 ret.append(p)
663 ret.append(p)
661 return ret
664 return ret
662
665
663 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
666 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
664 if pats == ("",):
667 if pats == ("",):
665 pats = []
668 pats = []
666 if not globbed and default == 'relpath':
669 if not globbed and default == 'relpath':
667 pats = expandpats(pats or [])
670 pats = expandpats(pats or [])
668
671
669 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
672 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
670 default)
673 default)
671 def badfn(f, msg):
674 def badfn(f, msg):
672 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
675 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
673 m.bad = badfn
676 m.bad = badfn
674 return m, pats
677 return m, pats
675
678
676 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
679 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
677 return matchandpats(ctx, pats, opts, globbed, default)[0]
680 return matchandpats(ctx, pats, opts, globbed, default)[0]
678
681
679 def matchall(repo):
682 def matchall(repo):
680 return matchmod.always(repo.root, repo.getcwd())
683 return matchmod.always(repo.root, repo.getcwd())
681
684
682 def matchfiles(repo, files):
685 def matchfiles(repo, files):
683 return matchmod.exact(repo.root, repo.getcwd(), files)
686 return matchmod.exact(repo.root, repo.getcwd(), files)
684
687
685 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
688 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
686 if dry_run is None:
689 if dry_run is None:
687 dry_run = opts.get('dry_run')
690 dry_run = opts.get('dry_run')
688 if similarity is None:
691 if similarity is None:
689 similarity = float(opts.get('similarity') or 0)
692 similarity = float(opts.get('similarity') or 0)
690 # we'd use status here, except handling of symlinks and ignore is tricky
693 # we'd use status here, except handling of symlinks and ignore is tricky
691 added, unknown, deleted, removed = [], [], [], []
694 added, unknown, deleted, removed = [], [], [], []
692 audit_path = pathauditor(repo.root)
695 audit_path = pathauditor(repo.root)
693 m = match(repo[None], pats, opts)
696 m = match(repo[None], pats, opts)
694 rejected = []
697 rejected = []
695 m.bad = lambda x, y: rejected.append(x)
698 m.bad = lambda x, y: rejected.append(x)
696
699
697 for abs in repo.walk(m):
700 for abs in repo.walk(m):
698 target = repo.wjoin(abs)
701 target = repo.wjoin(abs)
699 good = True
702 good = True
700 try:
703 try:
701 audit_path(abs)
704 audit_path(abs)
702 except (OSError, util.Abort):
705 except (OSError, util.Abort):
703 good = False
706 good = False
704 rel = m.rel(abs)
707 rel = m.rel(abs)
705 exact = m.exact(abs)
708 exact = m.exact(abs)
706 if good and abs not in repo.dirstate:
709 if good and abs not in repo.dirstate:
707 unknown.append(abs)
710 unknown.append(abs)
708 if repo.ui.verbose or not exact:
711 if repo.ui.verbose or not exact:
709 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
712 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
710 elif (repo.dirstate[abs] != 'r' and
713 elif (repo.dirstate[abs] != 'r' and
711 (not good or not os.path.lexists(target) or
714 (not good or not os.path.lexists(target) or
712 (os.path.isdir(target) and not os.path.islink(target)))):
715 (os.path.isdir(target) and not os.path.islink(target)))):
713 deleted.append(abs)
716 deleted.append(abs)
714 if repo.ui.verbose or not exact:
717 if repo.ui.verbose or not exact:
715 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
718 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
716 # for finding renames
719 # for finding renames
717 elif repo.dirstate[abs] == 'r':
720 elif repo.dirstate[abs] == 'r':
718 removed.append(abs)
721 removed.append(abs)
719 elif repo.dirstate[abs] == 'a':
722 elif repo.dirstate[abs] == 'a':
720 added.append(abs)
723 added.append(abs)
721 copies = {}
724 copies = {}
722 if similarity > 0:
725 if similarity > 0:
723 for old, new, score in similar.findrenames(repo,
726 for old, new, score in similar.findrenames(repo,
724 added + unknown, removed + deleted, similarity):
727 added + unknown, removed + deleted, similarity):
725 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
728 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
726 repo.ui.status(_('recording removal of %s as rename to %s '
729 repo.ui.status(_('recording removal of %s as rename to %s '
727 '(%d%% similar)\n') %
730 '(%d%% similar)\n') %
728 (m.rel(old), m.rel(new), score * 100))
731 (m.rel(old), m.rel(new), score * 100))
729 copies[new] = old
732 copies[new] = old
730
733
731 if not dry_run:
734 if not dry_run:
732 wctx = repo[None]
735 wctx = repo[None]
733 wlock = repo.wlock()
736 wlock = repo.wlock()
734 try:
737 try:
735 wctx.forget(deleted)
738 wctx.forget(deleted)
736 wctx.add(unknown)
739 wctx.add(unknown)
737 for new, old in copies.iteritems():
740 for new, old in copies.iteritems():
738 wctx.copy(old, new)
741 wctx.copy(old, new)
739 finally:
742 finally:
740 wlock.release()
743 wlock.release()
741
744
742 for f in rejected:
745 for f in rejected:
743 if f in m.files():
746 if f in m.files():
744 return 1
747 return 1
745 return 0
748 return 0
746
749
747 def updatedir(ui, repo, patches, similarity=0):
750 def updatedir(ui, repo, patches, similarity=0):
748 '''Update dirstate after patch application according to metadata'''
751 '''Update dirstate after patch application according to metadata'''
749 if not patches:
752 if not patches:
750 return []
753 return []
751 copies = []
754 copies = []
752 removes = set()
755 removes = set()
753 cfiles = patches.keys()
756 cfiles = patches.keys()
754 cwd = repo.getcwd()
757 cwd = repo.getcwd()
755 if cwd:
758 if cwd:
756 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
759 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
757 for f in patches:
760 for f in patches:
758 gp = patches[f]
761 gp = patches[f]
759 if not gp:
762 if not gp:
760 continue
763 continue
761 if gp.op == 'RENAME':
764 if gp.op == 'RENAME':
762 copies.append((gp.oldpath, gp.path))
765 copies.append((gp.oldpath, gp.path))
763 removes.add(gp.oldpath)
766 removes.add(gp.oldpath)
764 elif gp.op == 'COPY':
767 elif gp.op == 'COPY':
765 copies.append((gp.oldpath, gp.path))
768 copies.append((gp.oldpath, gp.path))
766 elif gp.op == 'DELETE':
769 elif gp.op == 'DELETE':
767 removes.add(gp.path)
770 removes.add(gp.path)
768
771
769 wctx = repo[None]
772 wctx = repo[None]
770 for src, dst in copies:
773 for src, dst in copies:
771 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
774 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
772 if (not similarity) and removes:
775 if (not similarity) and removes:
773 wctx.remove(sorted(removes), True)
776 wctx.remove(sorted(removes), True)
774
777
775 for f in patches:
778 for f in patches:
776 gp = patches[f]
779 gp = patches[f]
777 if gp and gp.mode:
780 if gp and gp.mode:
778 islink, isexec = gp.mode
781 islink, isexec = gp.mode
779 dst = repo.wjoin(gp.path)
782 dst = repo.wjoin(gp.path)
780 # patch won't create empty files
783 # patch won't create empty files
781 if gp.op == 'ADD' and not os.path.lexists(dst):
784 if gp.op == 'ADD' and not os.path.lexists(dst):
782 flags = (isexec and 'x' or '') + (islink and 'l' or '')
785 flags = (isexec and 'x' or '') + (islink and 'l' or '')
783 repo.wwrite(gp.path, '', flags)
786 repo.wwrite(gp.path, '', flags)
784 util.setflags(dst, islink, isexec)
787 util.setflags(dst, islink, isexec)
785 addremove(repo, cfiles, similarity=similarity)
788 addremove(repo, cfiles, similarity=similarity)
786 files = patches.keys()
789 files = patches.keys()
787 files.extend([r for r in removes if r not in files])
790 files.extend([r for r in removes if r not in files])
788 return sorted(files)
791 return sorted(files)
789
792
790 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
793 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
791 """Update the dirstate to reflect the intent of copying src to dst. For
794 """Update the dirstate to reflect the intent of copying src to dst. For
792 different reasons it might not end with dst being marked as copied from src.
795 different reasons it might not end with dst being marked as copied from src.
793 """
796 """
794 origsrc = repo.dirstate.copied(src) or src
797 origsrc = repo.dirstate.copied(src) or src
795 if dst == origsrc: # copying back a copy?
798 if dst == origsrc: # copying back a copy?
796 if repo.dirstate[dst] not in 'mn' and not dryrun:
799 if repo.dirstate[dst] not in 'mn' and not dryrun:
797 repo.dirstate.normallookup(dst)
800 repo.dirstate.normallookup(dst)
798 else:
801 else:
799 if repo.dirstate[origsrc] == 'a' and origsrc == src:
802 if repo.dirstate[origsrc] == 'a' and origsrc == src:
800 if not ui.quiet:
803 if not ui.quiet:
801 ui.warn(_("%s has not been committed yet, so no copy "
804 ui.warn(_("%s has not been committed yet, so no copy "
802 "data will be stored for %s.\n")
805 "data will be stored for %s.\n")
803 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
806 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
804 if repo.dirstate[dst] in '?r' and not dryrun:
807 if repo.dirstate[dst] in '?r' and not dryrun:
805 wctx.add([dst])
808 wctx.add([dst])
806 elif not dryrun:
809 elif not dryrun:
807 wctx.copy(origsrc, dst)
810 wctx.copy(origsrc, dst)
808
811
809 def readrequires(opener, supported):
812 def readrequires(opener, supported):
810 '''Reads and parses .hg/requires and checks if all entries found
813 '''Reads and parses .hg/requires and checks if all entries found
811 are in the list of supported features.'''
814 are in the list of supported features.'''
812 requirements = set(opener.read("requires").splitlines())
815 requirements = set(opener.read("requires").splitlines())
813 missings = []
816 missings = []
814 for r in requirements:
817 for r in requirements:
815 if r not in supported:
818 if r not in supported:
816 if not r or not r[0].isalnum():
819 if not r or not r[0].isalnum():
817 raise error.RequirementError(_(".hg/requires file is corrupt"))
820 raise error.RequirementError(_(".hg/requires file is corrupt"))
818 missings.append(r)
821 missings.append(r)
819 missings.sort()
822 missings.sort()
820 if missings:
823 if missings:
821 raise error.RequirementError(
824 raise error.RequirementError(
822 _("unknown repository format: requires features '%s' (upgrade "
825 _("unknown repository format: requires features '%s' (upgrade "
823 "Mercurial)") % "', '".join(missings))
826 "Mercurial)") % "', '".join(missings))
824 return requirements
827 return requirements
825
828
826 class filecacheentry(object):
829 class filecacheentry(object):
827 def __init__(self, path):
830 def __init__(self, path):
828 self.path = path
831 self.path = path
829 self.cachestat = filecacheentry.stat(self.path)
832 self.cachestat = filecacheentry.stat(self.path)
830
833
831 if self.cachestat:
834 if self.cachestat:
832 self._cacheable = self.cachestat.cacheable()
835 self._cacheable = self.cachestat.cacheable()
833 else:
836 else:
834 # None means we don't know yet
837 # None means we don't know yet
835 self._cacheable = None
838 self._cacheable = None
836
839
837 def refresh(self):
840 def refresh(self):
838 if self.cacheable():
841 if self.cacheable():
839 self.cachestat = filecacheentry.stat(self.path)
842 self.cachestat = filecacheentry.stat(self.path)
840
843
841 def cacheable(self):
844 def cacheable(self):
842 if self._cacheable is not None:
845 if self._cacheable is not None:
843 return self._cacheable
846 return self._cacheable
844
847
845 # we don't know yet, assume it is for now
848 # we don't know yet, assume it is for now
846 return True
849 return True
847
850
848 def changed(self):
851 def changed(self):
849 # no point in going further if we can't cache it
852 # no point in going further if we can't cache it
850 if not self.cacheable():
853 if not self.cacheable():
851 return True
854 return True
852
855
853 newstat = filecacheentry.stat(self.path)
856 newstat = filecacheentry.stat(self.path)
854
857
855 # we may not know if it's cacheable yet, check again now
858 # we may not know if it's cacheable yet, check again now
856 if newstat and self._cacheable is None:
859 if newstat and self._cacheable is None:
857 self._cacheable = newstat.cacheable()
860 self._cacheable = newstat.cacheable()
858
861
859 # check again
862 # check again
860 if not self._cacheable:
863 if not self._cacheable:
861 return True
864 return True
862
865
863 if self.cachestat != newstat:
866 if self.cachestat != newstat:
864 self.cachestat = newstat
867 self.cachestat = newstat
865 return True
868 return True
866 else:
869 else:
867 return False
870 return False
868
871
869 @staticmethod
872 @staticmethod
870 def stat(path):
873 def stat(path):
871 try:
874 try:
872 return util.cachestat(path)
875 return util.cachestat(path)
873 except OSError, e:
876 except OSError, e:
874 if e.errno != errno.ENOENT:
877 if e.errno != errno.ENOENT:
875 raise
878 raise
876
879
877 class filecache(object):
880 class filecache(object):
878 '''A property like decorator that tracks a file under .hg/ for updates.
881 '''A property like decorator that tracks a file under .hg/ for updates.
879
882
880 Records stat info when called in _filecache.
883 Records stat info when called in _filecache.
881
884
882 On subsequent calls, compares old stat info with new info, and recreates
885 On subsequent calls, compares old stat info with new info, and recreates
883 the object when needed, updating the new stat info in _filecache.
886 the object when needed, updating the new stat info in _filecache.
884
887
885 Mercurial either atomic renames or appends for files under .hg,
888 Mercurial either atomic renames or appends for files under .hg,
886 so to ensure the cache is reliable we need the filesystem to be able
889 so to ensure the cache is reliable we need the filesystem to be able
887 to tell us if a file has been replaced. If it can't, we fallback to
890 to tell us if a file has been replaced. If it can't, we fallback to
888 recreating the object on every call (essentially the same behaviour as
891 recreating the object on every call (essentially the same behaviour as
889 propertycache).'''
892 propertycache).'''
890 def __init__(self, path):
893 def __init__(self, path):
891 self.path = path
894 self.path = path
892
895
893 def join(self, obj, fname):
896 def join(self, obj, fname):
894 """Used to compute the runtime path of the cached file.
897 """Used to compute the runtime path of the cached file.
895
898
896 Users should subclass filecache and provide their own version of this
899 Users should subclass filecache and provide their own version of this
897 function to call the appropriate join function on 'obj' (an instance
900 function to call the appropriate join function on 'obj' (an instance
898 of the class that its member function was decorated).
901 of the class that its member function was decorated).
899 """
902 """
900 return obj.join(fname)
903 return obj.join(fname)
901
904
902 def __call__(self, func):
905 def __call__(self, func):
903 self.func = func
906 self.func = func
904 self.name = func.__name__
907 self.name = func.__name__
905 return self
908 return self
906
909
907 def __get__(self, obj, type=None):
910 def __get__(self, obj, type=None):
908 # do we need to check if the file changed?
911 # do we need to check if the file changed?
909 if self.name in obj.__dict__:
912 if self.name in obj.__dict__:
910 return obj.__dict__[self.name]
913 return obj.__dict__[self.name]
911
914
912 entry = obj._filecache.get(self.name)
915 entry = obj._filecache.get(self.name)
913
916
914 if entry:
917 if entry:
915 if entry.changed():
918 if entry.changed():
916 entry.obj = self.func(obj)
919 entry.obj = self.func(obj)
917 else:
920 else:
918 path = self.join(obj, self.path)
921 path = self.join(obj, self.path)
919
922
920 # We stat -before- creating the object so our cache doesn't lie if
923 # We stat -before- creating the object so our cache doesn't lie if
921 # a writer modified between the time we read and stat
924 # a writer modified between the time we read and stat
922 entry = filecacheentry(path)
925 entry = filecacheentry(path)
923 entry.obj = self.func(obj)
926 entry.obj = self.func(obj)
924
927
925 obj._filecache[self.name] = entry
928 obj._filecache[self.name] = entry
926
929
927 obj.__dict__[self.name] = entry.obj
930 obj.__dict__[self.name] = entry.obj
928 return entry.obj
931 return entry.obj
929
932
930 def __set__(self, obj, value):
933 def __set__(self, obj, value):
931 if self.name in obj._filecache:
934 if self.name in obj._filecache:
932 obj._filecache[self.name].obj = value # update cached copy
935 obj._filecache[self.name].obj = value # update cached copy
933 obj.__dict__[self.name] = value # update copy returned by obj.x
936 obj.__dict__[self.name] = value # update copy returned by obj.x
934
937
935 def __delete__(self, obj):
938 def __delete__(self, obj):
936 try:
939 try:
937 del obj.__dict__[self.name]
940 del obj.__dict__[self.name]
938 except KeyError:
941 except KeyError:
939 raise AttributeError, self.name
942 raise AttributeError, self.name
mercurial/store.py: @@ -1,505 +1,505 @@
# store.py - repository store handling for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
import osutil, scmutil, util, parsers
import os, stat, errno

_sha = util.sha1

# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
def _encodedir(path):
    '''
    >>> _encodedir('data/foo.i')
    'data/foo.i'
    >>> _encodedir('data/foo.i/bla.i')
    'data/foo.i.hg/bla.i'
    >>> _encodedir('data/foo.i.hg/bla.i')
    'data/foo.i.hg.hg/bla.i'
    >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
    'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
    '''
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

encodedir = getattr(parsers, 'encodedir', _encodedir)

def decodedir(path):
    '''
    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))

def _buildencodefun():
    '''
    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[c] for c in s]),
            lambda s: ''.join(list(decode(s))))

_encodefname, _decodefname = _buildencodefun()

def encodefilename(s):
    '''
    >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    return _encodefname(encodedir(s))

def decodefilename(s):
    '''
    >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    return decodedir(_decodefname(s))

def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f('nothing/special.txt')
    'nothing/special.txt'
    >>> f('HELLO')
    'hello'
    >>> f('hello:world?')
    'hello~3aworld~3f'
    >>> f('the\x07quick\xADshot')
    'the~07quick~adshot'
    '''
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1):
        cmap[chr(x)] = chr(x).lower()
    return lambda s: "".join([cmap[c] for c in s])

lowerencode = _buildlowerencodefun()

# Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
_winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
def _auxencode(path, dotencode):
    '''
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split('/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split('/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode(['foo. '], True)
    ['foo.~20']
    >>> _auxencode([' .foo'], True)
    ['~20.foo']
    '''
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in '. ':
            n = "~%02x" % ord(n[0]) + n[1:]
            path[i] = n
        else:
            l = n.find('.')
            if l == -1:
                l = len(n)
            if ((l == 3 and n[:3] in _winres3) or
                (l == 4 and n[3] <= '9' and n[3] >= '1'
                 and n[:3] in _winres4)):
                # encode third letter ('aux' -> 'au~78')
                ec = "~%02x" % ord(n[2])
                n = n[0:2] + ec + n[3:]
                path[i] = n
        if n[-1] in '. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + "~%02x" % ord(n[-1])
    return path

_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4

def _hashencode(path, dotencode):
    digest = _sha(path).hexdigest()
    le = lowerencode(path).split('/')[1:]
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
            if t > _maxshortdirslen:
                break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if len(dirs) > 0:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res

def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    ef = _encodefname(path).split('/')
    res = '/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        res = _hashencode(path, dotencode)
    return res
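As a rough sanity check of the two branches described in the docstring above; the example paths are invented, and the exact hashed name depends on the SHA-1 digest, so only its shape is asserted:

    # short, default-encodable path: comes back unchanged and reversible
    assert _hybridencode('data/some/ordinary/path.i', True) == 'data/some/ordinary/path.i'

    # overlong path: falls back to the non-reversible 'dh/' form, keeps '.i'
    longpath = ('data/' +
                '/'.join(['averylongdirectoryname%d' % i for i in range(10)]) +
                '/file.i')
    hashed = _hybridencode(longpath, True)
    assert hashed.startswith('dh/') and hashed.endswith('.i')
    assert len(hashed) <= _maxstorepathlen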

def _pathencode(path):
    if len(path) > _maxstorepathlen:
        return None
    ef = _encodefname(encodedir(path)).split('/')
    res = '/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return None
    return res

_pathencode = getattr(parsers, 'pathencode', _pathencode)

def _dothybridencode(f):
    ef = _pathencode(f)
    if ef is None:
        return _hashencode(encodedir(f), True)
    return ef

def _plainhybridencode(f):
    return _hybridencode(f, False)

-def _calcmode(path):
+def _calcmode(vfs):
    try:
        # files in .hg/ will be created using this mode
-        mode = os.stat(path).st_mode
+        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0777 & ~util.umask) == (0777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode
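The two marked lines above are the heart of this changeset: the mode probe now goes through the vfs object instead of calling os.stat() on a raw path. A minimal sketch of the kind of vfs _calcmode() now expects; the somevfs class is hypothetical and only mirrors the attributes used below (base, createmode, an argument-less stat()):

    import os

    class somevfs(object):
        def __init__(self, base):
            self.base = base
            self.createmode = None

        def stat(self, path=None):
            # _calcmode(vfs) calls vfs.stat() with no argument,
            # i.e. it stats the base directory itself
            return os.stat(os.path.join(self.base, path) if path else self.base)

    mode = _calcmode(somevfs('/repo/.hg/store'))   # an st_mode value, or None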

_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')

class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
-        self.createmode = _calcmode(path)
+        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.vfs = scmutil.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if os.path.isdir(path):
            visit = [path]
            while visit:
                p = visit.pop()
                for f, kind, st in osutil.listdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def datafiles(self):
        return self._walk('data', True)

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        # yield manifest before changelog
        for x in reversed(self._walk('', False)):
            yield x

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self):
        pass
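A small sketch of consuming the iteration order documented in walk() above (data files first, then the top-level files with the manifest ahead of the changelog); somevfstype stands in for whatever opener class the caller normally passes:

    s = basicstore('/repo/.hg', somevfstype)
    for unencoded, encoded, size in s.walk():
        # e.g. 'data/foo.i', ..., '00manifest.i', ..., '00changelog.i'
        print unencoded, size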

class encodedstore(basicstore):
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
-        self.createmode = _calcmode(self.path)
+        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.vfs = scmutil.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for a, b, size in self._walk('data', True):
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])

class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %s') % (n + 1)
                    raise util.Abort(t)
        fp.close()

    def _write(self, files, atomictemp):
        fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
        if files:
            fp.write(encodedir('\n'.join(files) + '\n'))
        fp.close()
        self._dirty = False

    def rewrite(self, files):
        self._write(files, False)
        self.entries = set(files)

    def write(self):
        if self._dirty:
            self._write(self.entries, True)

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)
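A short illustrative walk through the fncache lifecycle; 'vfs' is assumed to be any opener-style object like the ones the stores construct below:

    fnc = fncache(vfs)
    fnc.add('data/foo.i')        # lazily loads the on-disk list, marks it dirty
    assert 'data/foo.i' in fnc   # answered from the in-memory set
    fnc.write()                  # rewrites the file atomically, only when dirty
    fnc.rewrite(['data/foo.i'])  # unconditional rewrite, used to drop stale entries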

class _fncachevfs(scmutil.abstractvfs):
    def __init__(self, vfs, fnc, encode):
        self.vfs = vfs
        self.fncache = fnc
        self.encode = encode

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)

    def __call__(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rb') and path.startswith('data/'):
            self.fncache.add(path)
        return self.vfs(self.encode(path), mode, *args, **kw)

    def join(self, path):
        if path:
            return self.vfs.join(self.encode(path))
        else:
            return self.vfs.join(path)

class fncachestore(basicstore):
    def __init__(self, path, vfstype, dotencode):
        if dotencode:
            encode = _dothybridencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
-        self.createmode = _calcmode(self.path)
+        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return os.stat(self.pathsep + path).st_size

    def datafiles(self):
        rewrite = False
        existing = []
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
                existing.append(f)
            except OSError, err:
                if err.errno != errno.ENOENT:
                    raise
                # nonexistent entry
                rewrite = True
        if rewrite:
            # rewrite fncache to remove nonexistent entries
            # (may be caused by rollback / strip)
            self.fncache.rewrite(existing)

    def copylist(self):
        d = ('data dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self):
        self.fncache.write()

def store(requirements, path, vfstype):
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, vfstype, 'dotencode' in requirements)
        return encodedstore(path, vfstype)
    return basicstore(path, vfstype)
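A hedged example of how a caller picks a store flavour from the repository requirements; scmutil.vfs is assumed to be the opener class normally passed in:

    requirements = set(['store', 'fncache', 'dotencode'])
    s = store(requirements, '/repo/.hg', scmutil.vfs)
    # 'store' + 'fncache'           -> fncachestore ('dotencode' controls '.'/' ' escaping)
    # 'store' without 'fncache'     -> encodedstore
    # no 'store' requirement at all -> basicstore (plain, unencoded layout)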