Changeset r24755:cd89f4e6 (default branch) by Pierre-Yves David:
devel-warn: add a prefix to all messages ("devel-warn: ")
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat, inspect
13 import os, errno, re, glob, tempfile, shutil, stat, inspect
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 class status(tuple):
23 class status(tuple):
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
25 and 'ignored' properties are only relevant to the working copy.
25 and 'ignored' properties are only relevant to the working copy.
26 '''
26 '''
27
27
28 __slots__ = ()
28 __slots__ = ()
29
29
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
31 clean):
31 clean):
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
33 ignored, clean))
33 ignored, clean))
34
34
35 @property
35 @property
36 def modified(self):
36 def modified(self):
37 '''files that have been modified'''
37 '''files that have been modified'''
38 return self[0]
38 return self[0]
39
39
40 @property
40 @property
41 def added(self):
41 def added(self):
42 '''files that have been added'''
42 '''files that have been added'''
43 return self[1]
43 return self[1]
44
44
45 @property
45 @property
46 def removed(self):
46 def removed(self):
47 '''files that have been removed'''
47 '''files that have been removed'''
48 return self[2]
48 return self[2]
49
49
50 @property
50 @property
51 def deleted(self):
51 def deleted(self):
52 '''files that are in the dirstate, but have been deleted from the
52 '''files that are in the dirstate, but have been deleted from the
53 working copy (aka "missing")
53 working copy (aka "missing")
54 '''
54 '''
55 return self[3]
55 return self[3]
56
56
57 @property
57 @property
58 def unknown(self):
58 def unknown(self):
59 '''files not in the dirstate that are not ignored'''
59 '''files not in the dirstate that are not ignored'''
60 return self[4]
60 return self[4]
61
61
62 @property
62 @property
63 def ignored(self):
63 def ignored(self):
64 '''files not in the dirstate that are ignored (by _dirignore())'''
64 '''files not in the dirstate that are ignored (by _dirignore())'''
65 return self[5]
65 return self[5]
66
66
67 @property
67 @property
68 def clean(self):
68 def clean(self):
69 '''files that have not been modified'''
69 '''files that have not been modified'''
70 return self[6]
70 return self[6]
71
71
72 def __repr__(self, *args, **kwargs):
72 def __repr__(self, *args, **kwargs):
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
74 'unknown=%r, ignored=%r, clean=%r>') % self)
74 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
76 def itersubrepos(ctx1, ctx2):
76 def itersubrepos(ctx1, ctx2):
77 """find subrepos in ctx1 or ctx2"""
77 """find subrepos in ctx1 or ctx2"""
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
80 # has been modified (in ctx2) but not yet committed (in ctx1).
80 # has been modified (in ctx2) but not yet committed (in ctx1).
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
83 for subpath, ctx in sorted(subpaths.iteritems()):
83 for subpath, ctx in sorted(subpaths.iteritems()):
84 yield subpath, ctx.sub(subpath)
84 yield subpath, ctx.sub(subpath)
85
85
86 def nochangesfound(ui, repo, excluded=None):
86 def nochangesfound(ui, repo, excluded=None):
87 '''Report no changes for push/pull, excluded is None or a list of
87 '''Report no changes for push/pull, excluded is None or a list of
88 nodes excluded from the push/pull.
88 nodes excluded from the push/pull.
89 '''
89 '''
90 secretlist = []
90 secretlist = []
91 if excluded:
91 if excluded:
92 for n in excluded:
92 for n in excluded:
93 if n not in repo:
93 if n not in repo:
94 # discovery should not have included the filtered revision,
94 # discovery should not have included the filtered revision,
95 # we have to explicitly exclude it until discovery is cleanup.
95 # we have to explicitly exclude it until discovery is cleanup.
96 continue
96 continue
97 ctx = repo[n]
97 ctx = repo[n]
98 if ctx.phase() >= phases.secret and not ctx.extinct():
98 if ctx.phase() >= phases.secret and not ctx.extinct():
99 secretlist.append(n)
99 secretlist.append(n)
100
100
101 if secretlist:
101 if secretlist:
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
103 % len(secretlist))
103 % len(secretlist))
104 else:
104 else:
105 ui.status(_("no changes found\n"))
105 ui.status(_("no changes found\n"))
106
106
107 def checknewlabel(repo, lbl, kind):
107 def checknewlabel(repo, lbl, kind):
108 # Do not use the "kind" parameter in ui output.
108 # Do not use the "kind" parameter in ui output.
109 # It makes strings difficult to translate.
109 # It makes strings difficult to translate.
110 if lbl in ['tip', '.', 'null']:
110 if lbl in ['tip', '.', 'null']:
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
112 for c in (':', '\0', '\n', '\r'):
112 for c in (':', '\0', '\n', '\r'):
113 if c in lbl:
113 if c in lbl:
114 raise util.Abort(_("%r cannot be used in a name") % c)
114 raise util.Abort(_("%r cannot be used in a name") % c)
115 try:
115 try:
116 int(lbl)
116 int(lbl)
117 raise util.Abort(_("cannot use an integer as a name"))
117 raise util.Abort(_("cannot use an integer as a name"))
118 except ValueError:
118 except ValueError:
119 pass
119 pass
120
120
121 def checkfilename(f):
121 def checkfilename(f):
122 '''Check that the filename f is an acceptable filename for a tracked file'''
122 '''Check that the filename f is an acceptable filename for a tracked file'''
123 if '\r' in f or '\n' in f:
123 if '\r' in f or '\n' in f:
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
126 def checkportable(ui, f):
126 def checkportable(ui, f):
127 '''Check if filename f is portable and warn or abort depending on config'''
127 '''Check if filename f is portable and warn or abort depending on config'''
128 checkfilename(f)
128 checkfilename(f)
129 abort, warn = checkportabilityalert(ui)
129 abort, warn = checkportabilityalert(ui)
130 if abort or warn:
130 if abort or warn:
131 msg = util.checkwinfilename(f)
131 msg = util.checkwinfilename(f)
132 if msg:
132 if msg:
133 msg = "%s: %r" % (msg, f)
133 msg = "%s: %r" % (msg, f)
134 if abort:
134 if abort:
135 raise util.Abort(msg)
135 raise util.Abort(msg)
136 ui.warn(_("warning: %s\n") % msg)
136 ui.warn(_("warning: %s\n") % msg)
137
137
138 def checkportabilityalert(ui):
138 def checkportabilityalert(ui):
139 '''check if the user's config requests nothing, a warning, or abort for
139 '''check if the user's config requests nothing, a warning, or abort for
140 non-portable filenames'''
140 non-portable filenames'''
141 val = ui.config('ui', 'portablefilenames', 'warn')
141 val = ui.config('ui', 'portablefilenames', 'warn')
142 lval = val.lower()
142 lval = val.lower()
143 bval = util.parsebool(val)
143 bval = util.parsebool(val)
144 abort = os.name == 'nt' or lval == 'abort'
144 abort = os.name == 'nt' or lval == 'abort'
145 warn = bval or lval == 'warn'
145 warn = bval or lval == 'warn'
146 if bval is None and not (warn or abort or lval == 'ignore'):
146 if bval is None and not (warn or abort or lval == 'ignore'):
147 raise error.ConfigError(
147 raise error.ConfigError(
148 _("ui.portablefilenames value is invalid ('%s')") % val)
148 _("ui.portablefilenames value is invalid ('%s')") % val)
149 return abort, warn
149 return abort, warn
150
150
151 class casecollisionauditor(object):
151 class casecollisionauditor(object):
152 def __init__(self, ui, abort, dirstate):
152 def __init__(self, ui, abort, dirstate):
153 self._ui = ui
153 self._ui = ui
154 self._abort = abort
154 self._abort = abort
155 allfiles = '\0'.join(dirstate._map)
155 allfiles = '\0'.join(dirstate._map)
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
157 self._dirstate = dirstate
157 self._dirstate = dirstate
158 # The purpose of _newfiles is so that we don't complain about
158 # The purpose of _newfiles is so that we don't complain about
159 # case collisions if someone were to call this object with the
159 # case collisions if someone were to call this object with the
160 # same filename twice.
160 # same filename twice.
161 self._newfiles = set()
161 self._newfiles = set()
162
162
163 def __call__(self, f):
163 def __call__(self, f):
164 if f in self._newfiles:
164 if f in self._newfiles:
165 return
165 return
166 fl = encoding.lower(f)
166 fl = encoding.lower(f)
167 if fl in self._loweredfiles and f not in self._dirstate:
167 if fl in self._loweredfiles and f not in self._dirstate:
168 msg = _('possible case-folding collision for %s') % f
168 msg = _('possible case-folding collision for %s') % f
169 if self._abort:
169 if self._abort:
170 raise util.Abort(msg)
170 raise util.Abort(msg)
171 self._ui.warn(_("warning: %s\n") % msg)
171 self._ui.warn(_("warning: %s\n") % msg)
172 self._loweredfiles.add(fl)
172 self._loweredfiles.add(fl)
173 self._newfiles.add(f)
173 self._newfiles.add(f)
174
174
175 def develwarn(tui, msg):
175 def develwarn(tui, msg):
176 """issue a developer warning message"""
176 """issue a developer warning message"""
177 msg = 'devel-warn: ' + msg
177 if tui.tracebackflag:
178 if tui.tracebackflag:
178 util.debugstacktrace(msg, 2)
179 util.debugstacktrace(msg, 2)
179 else:
180 else:
180 curframe = inspect.currentframe()
181 curframe = inspect.currentframe()
181 calframe = inspect.getouterframes(curframe, 2)
182 calframe = inspect.getouterframes(curframe, 2)
182 tui.write_err('%s at: %s:%s (%s)\n' % ((msg,) + calframe[2][1:4]))
183 tui.write_err('%s at: %s:%s (%s)\n' % ((msg,) + calframe[2][1:4]))
183
184
184 def filteredhash(repo, maxrev):
185 def filteredhash(repo, maxrev):
185 """build hash of filtered revisions in the current repoview.
186 """build hash of filtered revisions in the current repoview.
186
187
187 Multiple caches perform up-to-date validation by checking that the
188 Multiple caches perform up-to-date validation by checking that the
188 tiprev and tipnode stored in the cache file match the current repository.
189 tiprev and tipnode stored in the cache file match the current repository.
189 However, this is not sufficient for validating repoviews because the set
190 However, this is not sufficient for validating repoviews because the set
190 of revisions in the view may change without the repository tiprev and
191 of revisions in the view may change without the repository tiprev and
191 tipnode changing.
192 tipnode changing.
192
193
193 This function hashes all the revs filtered from the view and returns
194 This function hashes all the revs filtered from the view and returns
194 that SHA-1 digest.
195 that SHA-1 digest.
195 """
196 """
196 cl = repo.changelog
197 cl = repo.changelog
197 if not cl.filteredrevs:
198 if not cl.filteredrevs:
198 return None
199 return None
199 key = None
200 key = None
200 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
201 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
201 if revs:
202 if revs:
202 s = util.sha1()
203 s = util.sha1()
203 for rev in revs:
204 for rev in revs:
204 s.update('%s;' % rev)
205 s.update('%s;' % rev)
205 key = s.digest()
206 key = s.digest()
206 return key
207 return key
207
208
208 class abstractvfs(object):
209 class abstractvfs(object):
209 """Abstract base class; cannot be instantiated"""
210 """Abstract base class; cannot be instantiated"""
210
211
211 def __init__(self, *args, **kwargs):
212 def __init__(self, *args, **kwargs):
212 '''Prevent instantiation; don't call this from subclasses.'''
213 '''Prevent instantiation; don't call this from subclasses.'''
213 raise NotImplementedError('attempted instantiating ' + str(type(self)))
214 raise NotImplementedError('attempted instantiating ' + str(type(self)))
214
215
215 def tryread(self, path):
216 def tryread(self, path):
216 '''gracefully return an empty string for missing files'''
217 '''gracefully return an empty string for missing files'''
217 try:
218 try:
218 return self.read(path)
219 return self.read(path)
219 except IOError, inst:
220 except IOError, inst:
220 if inst.errno != errno.ENOENT:
221 if inst.errno != errno.ENOENT:
221 raise
222 raise
222 return ""
223 return ""
223
224
224 def tryreadlines(self, path, mode='rb'):
225 def tryreadlines(self, path, mode='rb'):
225 '''gracefully return an empty array for missing files'''
226 '''gracefully return an empty array for missing files'''
226 try:
227 try:
227 return self.readlines(path, mode=mode)
228 return self.readlines(path, mode=mode)
228 except IOError, inst:
229 except IOError, inst:
229 if inst.errno != errno.ENOENT:
230 if inst.errno != errno.ENOENT:
230 raise
231 raise
231 return []
232 return []
232
233
233 def open(self, path, mode="r", text=False, atomictemp=False,
234 def open(self, path, mode="r", text=False, atomictemp=False,
234 notindexed=False):
235 notindexed=False):
235 '''Open ``path`` file, which is relative to vfs root.
236 '''Open ``path`` file, which is relative to vfs root.
236
237
237 Newly created directories are marked as "not to be indexed by
238 Newly created directories are marked as "not to be indexed by
238 the content indexing service", if ``notindexed`` is specified
239 the content indexing service", if ``notindexed`` is specified
239 for "write" mode access.
240 for "write" mode access.
240 '''
241 '''
241 self.open = self.__call__
242 self.open = self.__call__
242 return self.__call__(path, mode, text, atomictemp, notindexed)
243 return self.__call__(path, mode, text, atomictemp, notindexed)
243
244
244 def read(self, path):
245 def read(self, path):
245 fp = self(path, 'rb')
246 fp = self(path, 'rb')
246 try:
247 try:
247 return fp.read()
248 return fp.read()
248 finally:
249 finally:
249 fp.close()
250 fp.close()
250
251
251 def readlines(self, path, mode='rb'):
252 def readlines(self, path, mode='rb'):
252 fp = self(path, mode=mode)
253 fp = self(path, mode=mode)
253 try:
254 try:
254 return fp.readlines()
255 return fp.readlines()
255 finally:
256 finally:
256 fp.close()
257 fp.close()
257
258
258 def write(self, path, data):
259 def write(self, path, data):
259 fp = self(path, 'wb')
260 fp = self(path, 'wb')
260 try:
261 try:
261 return fp.write(data)
262 return fp.write(data)
262 finally:
263 finally:
263 fp.close()
264 fp.close()
264
265
265 def writelines(self, path, data, mode='wb', notindexed=False):
266 def writelines(self, path, data, mode='wb', notindexed=False):
266 fp = self(path, mode=mode, notindexed=notindexed)
267 fp = self(path, mode=mode, notindexed=notindexed)
267 try:
268 try:
268 return fp.writelines(data)
269 return fp.writelines(data)
269 finally:
270 finally:
270 fp.close()
271 fp.close()
271
272
272 def append(self, path, data):
273 def append(self, path, data):
273 fp = self(path, 'ab')
274 fp = self(path, 'ab')
274 try:
275 try:
275 return fp.write(data)
276 return fp.write(data)
276 finally:
277 finally:
277 fp.close()
278 fp.close()
278
279
279 def chmod(self, path, mode):
280 def chmod(self, path, mode):
280 return os.chmod(self.join(path), mode)
281 return os.chmod(self.join(path), mode)
281
282
282 def exists(self, path=None):
283 def exists(self, path=None):
283 return os.path.exists(self.join(path))
284 return os.path.exists(self.join(path))
284
285
285 def fstat(self, fp):
286 def fstat(self, fp):
286 return util.fstat(fp)
287 return util.fstat(fp)
287
288
288 def isdir(self, path=None):
289 def isdir(self, path=None):
289 return os.path.isdir(self.join(path))
290 return os.path.isdir(self.join(path))
290
291
291 def isfile(self, path=None):
292 def isfile(self, path=None):
292 return os.path.isfile(self.join(path))
293 return os.path.isfile(self.join(path))
293
294
294 def islink(self, path=None):
295 def islink(self, path=None):
295 return os.path.islink(self.join(path))
296 return os.path.islink(self.join(path))
296
297
297 def reljoin(self, *paths):
298 def reljoin(self, *paths):
298 """join various elements of a path together (as os.path.join would do)
299 """join various elements of a path together (as os.path.join would do)
299
300
300 The vfs base is not injected so that path stay relative. This exists
301 The vfs base is not injected so that path stay relative. This exists
301 to allow handling of strange encoding if needed."""
302 to allow handling of strange encoding if needed."""
302 return os.path.join(*paths)
303 return os.path.join(*paths)
303
304
304 def split(self, path):
305 def split(self, path):
305 """split top-most element of a path (as os.path.split would do)
306 """split top-most element of a path (as os.path.split would do)
306
307
307 This exists to allow handling of strange encoding if needed."""
308 This exists to allow handling of strange encoding if needed."""
308 return os.path.split(path)
309 return os.path.split(path)
309
310
310 def lexists(self, path=None):
311 def lexists(self, path=None):
311 return os.path.lexists(self.join(path))
312 return os.path.lexists(self.join(path))
312
313
313 def lstat(self, path=None):
314 def lstat(self, path=None):
314 return os.lstat(self.join(path))
315 return os.lstat(self.join(path))
315
316
316 def listdir(self, path=None):
317 def listdir(self, path=None):
317 return os.listdir(self.join(path))
318 return os.listdir(self.join(path))
318
319
319 def makedir(self, path=None, notindexed=True):
320 def makedir(self, path=None, notindexed=True):
320 return util.makedir(self.join(path), notindexed)
321 return util.makedir(self.join(path), notindexed)
321
322
322 def makedirs(self, path=None, mode=None):
323 def makedirs(self, path=None, mode=None):
323 return util.makedirs(self.join(path), mode)
324 return util.makedirs(self.join(path), mode)
324
325
325 def makelock(self, info, path):
326 def makelock(self, info, path):
326 return util.makelock(info, self.join(path))
327 return util.makelock(info, self.join(path))
327
328
328 def mkdir(self, path=None):
329 def mkdir(self, path=None):
329 return os.mkdir(self.join(path))
330 return os.mkdir(self.join(path))
330
331
331 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
332 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
332 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
333 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
333 dir=self.join(dir), text=text)
334 dir=self.join(dir), text=text)
334 dname, fname = util.split(name)
335 dname, fname = util.split(name)
335 if dir:
336 if dir:
336 return fd, os.path.join(dir, fname)
337 return fd, os.path.join(dir, fname)
337 else:
338 else:
338 return fd, fname
339 return fd, fname
339
340
340 def readdir(self, path=None, stat=None, skip=None):
341 def readdir(self, path=None, stat=None, skip=None):
341 return osutil.listdir(self.join(path), stat, skip)
342 return osutil.listdir(self.join(path), stat, skip)
342
343
343 def readlock(self, path):
344 def readlock(self, path):
344 return util.readlock(self.join(path))
345 return util.readlock(self.join(path))
345
346
346 def rename(self, src, dst):
347 def rename(self, src, dst):
347 return util.rename(self.join(src), self.join(dst))
348 return util.rename(self.join(src), self.join(dst))
348
349
349 def readlink(self, path):
350 def readlink(self, path):
350 return os.readlink(self.join(path))
351 return os.readlink(self.join(path))
351
352
352 def removedirs(self, path=None):
353 def removedirs(self, path=None):
353 """Remove a leaf directory and all empty intermediate ones
354 """Remove a leaf directory and all empty intermediate ones
354 """
355 """
355 return util.removedirs(self.join(path))
356 return util.removedirs(self.join(path))
356
357
357 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
358 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
358 """Remove a directory tree recursively
359 """Remove a directory tree recursively
359
360
360 If ``forcibly``, this tries to remove READ-ONLY files, too.
361 If ``forcibly``, this tries to remove READ-ONLY files, too.
361 """
362 """
362 if forcibly:
363 if forcibly:
363 def onerror(function, path, excinfo):
364 def onerror(function, path, excinfo):
364 if function is not os.remove:
365 if function is not os.remove:
365 raise
366 raise
366 # read-only files cannot be unlinked under Windows
367 # read-only files cannot be unlinked under Windows
367 s = os.stat(path)
368 s = os.stat(path)
368 if (s.st_mode & stat.S_IWRITE) != 0:
369 if (s.st_mode & stat.S_IWRITE) != 0:
369 raise
370 raise
370 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
371 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
371 os.remove(path)
372 os.remove(path)
372 else:
373 else:
373 onerror = None
374 onerror = None
374 return shutil.rmtree(self.join(path),
375 return shutil.rmtree(self.join(path),
375 ignore_errors=ignore_errors, onerror=onerror)
376 ignore_errors=ignore_errors, onerror=onerror)
376
377
377 def setflags(self, path, l, x):
378 def setflags(self, path, l, x):
378 return util.setflags(self.join(path), l, x)
379 return util.setflags(self.join(path), l, x)
379
380
380 def stat(self, path=None):
381 def stat(self, path=None):
381 return os.stat(self.join(path))
382 return os.stat(self.join(path))
382
383
383 def unlink(self, path=None):
384 def unlink(self, path=None):
384 return util.unlink(self.join(path))
385 return util.unlink(self.join(path))
385
386
386 def unlinkpath(self, path=None, ignoremissing=False):
387 def unlinkpath(self, path=None, ignoremissing=False):
387 return util.unlinkpath(self.join(path), ignoremissing)
388 return util.unlinkpath(self.join(path), ignoremissing)
388
389
389 def utime(self, path=None, t=None):
390 def utime(self, path=None, t=None):
390 return os.utime(self.join(path), t)
391 return os.utime(self.join(path), t)
391
392
392 def walk(self, path=None, onerror=None):
393 def walk(self, path=None, onerror=None):
393 """Yield (dirpath, dirs, files) tuple for each directories under path
394 """Yield (dirpath, dirs, files) tuple for each directories under path
394
395
395 ``dirpath`` is relative one from the root of this vfs. This
396 ``dirpath`` is relative one from the root of this vfs. This
396 uses ``os.sep`` as path separator, even you specify POSIX
397 uses ``os.sep`` as path separator, even you specify POSIX
397 style ``path``.
398 style ``path``.
398
399
399 "The root of this vfs" is represented as empty ``dirpath``.
400 "The root of this vfs" is represented as empty ``dirpath``.
400 """
401 """
401 root = os.path.normpath(self.join(None))
402 root = os.path.normpath(self.join(None))
402 # when dirpath == root, dirpath[prefixlen:] becomes empty
403 # when dirpath == root, dirpath[prefixlen:] becomes empty
403 # because len(dirpath) < prefixlen.
404 # because len(dirpath) < prefixlen.
404 prefixlen = len(pathutil.normasprefix(root))
405 prefixlen = len(pathutil.normasprefix(root))
405 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
406 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
406 yield (dirpath[prefixlen:], dirs, files)
407 yield (dirpath[prefixlen:], dirs, files)
407
408
408 class vfs(abstractvfs):
409 class vfs(abstractvfs):
409 '''Operate files relative to a base directory
410 '''Operate files relative to a base directory
410
411
411 This class is used to hide the details of COW semantics and
412 This class is used to hide the details of COW semantics and
412 remote file access from higher level code.
413 remote file access from higher level code.
413 '''
414 '''
414 def __init__(self, base, audit=True, expandpath=False, realpath=False):
415 def __init__(self, base, audit=True, expandpath=False, realpath=False):
415 if expandpath:
416 if expandpath:
416 base = util.expandpath(base)
417 base = util.expandpath(base)
417 if realpath:
418 if realpath:
418 base = os.path.realpath(base)
419 base = os.path.realpath(base)
419 self.base = base
420 self.base = base
420 self._setmustaudit(audit)
421 self._setmustaudit(audit)
421 self.createmode = None
422 self.createmode = None
422 self._trustnlink = None
423 self._trustnlink = None
423
424
424 def _getmustaudit(self):
425 def _getmustaudit(self):
425 return self._audit
426 return self._audit
426
427
427 def _setmustaudit(self, onoff):
428 def _setmustaudit(self, onoff):
428 self._audit = onoff
429 self._audit = onoff
429 if onoff:
430 if onoff:
430 self.audit = pathutil.pathauditor(self.base)
431 self.audit = pathutil.pathauditor(self.base)
431 else:
432 else:
432 self.audit = util.always
433 self.audit = util.always
433
434
434 mustaudit = property(_getmustaudit, _setmustaudit)
435 mustaudit = property(_getmustaudit, _setmustaudit)
435
436
436 @util.propertycache
437 @util.propertycache
437 def _cansymlink(self):
438 def _cansymlink(self):
438 return util.checklink(self.base)
439 return util.checklink(self.base)
439
440
440 @util.propertycache
441 @util.propertycache
441 def _chmod(self):
442 def _chmod(self):
442 return util.checkexec(self.base)
443 return util.checkexec(self.base)
443
444
444 def _fixfilemode(self, name):
445 def _fixfilemode(self, name):
445 if self.createmode is None or not self._chmod:
446 if self.createmode is None or not self._chmod:
446 return
447 return
447 os.chmod(name, self.createmode & 0666)
448 os.chmod(name, self.createmode & 0666)
448
449
449 def __call__(self, path, mode="r", text=False, atomictemp=False,
450 def __call__(self, path, mode="r", text=False, atomictemp=False,
450 notindexed=False):
451 notindexed=False):
451 '''Open ``path`` file, which is relative to vfs root.
452 '''Open ``path`` file, which is relative to vfs root.
452
453
453 Newly created directories are marked as "not to be indexed by
454 Newly created directories are marked as "not to be indexed by
454 the content indexing service", if ``notindexed`` is specified
455 the content indexing service", if ``notindexed`` is specified
455 for "write" mode access.
456 for "write" mode access.
456 '''
457 '''
457 if self._audit:
458 if self._audit:
458 r = util.checkosfilename(path)
459 r = util.checkosfilename(path)
459 if r:
460 if r:
460 raise util.Abort("%s: %r" % (r, path))
461 raise util.Abort("%s: %r" % (r, path))
461 self.audit(path)
462 self.audit(path)
462 f = self.join(path)
463 f = self.join(path)
463
464
464 if not text and "b" not in mode:
465 if not text and "b" not in mode:
465 mode += "b" # for that other OS
466 mode += "b" # for that other OS
466
467
467 nlink = -1
468 nlink = -1
468 if mode not in ('r', 'rb'):
469 if mode not in ('r', 'rb'):
469 dirname, basename = util.split(f)
470 dirname, basename = util.split(f)
470 # If basename is empty, then the path is malformed because it points
471 # If basename is empty, then the path is malformed because it points
471 # to a directory. Let the posixfile() call below raise IOError.
472 # to a directory. Let the posixfile() call below raise IOError.
472 if basename:
473 if basename:
473 if atomictemp:
474 if atomictemp:
474 util.ensuredirs(dirname, self.createmode, notindexed)
475 util.ensuredirs(dirname, self.createmode, notindexed)
475 return util.atomictempfile(f, mode, self.createmode)
476 return util.atomictempfile(f, mode, self.createmode)
476 try:
477 try:
477 if 'w' in mode:
478 if 'w' in mode:
478 util.unlink(f)
479 util.unlink(f)
479 nlink = 0
480 nlink = 0
480 else:
481 else:
481 # nlinks() may behave differently for files on Windows
482 # nlinks() may behave differently for files on Windows
482 # shares if the file is open.
483 # shares if the file is open.
483 fd = util.posixfile(f)
484 fd = util.posixfile(f)
484 nlink = util.nlinks(f)
485 nlink = util.nlinks(f)
485 if nlink < 1:
486 if nlink < 1:
486 nlink = 2 # force mktempcopy (issue1922)
487 nlink = 2 # force mktempcopy (issue1922)
487 fd.close()
488 fd.close()
488 except (OSError, IOError), e:
489 except (OSError, IOError), e:
489 if e.errno != errno.ENOENT:
490 if e.errno != errno.ENOENT:
490 raise
491 raise
491 nlink = 0
492 nlink = 0
492 util.ensuredirs(dirname, self.createmode, notindexed)
493 util.ensuredirs(dirname, self.createmode, notindexed)
493 if nlink > 0:
494 if nlink > 0:
494 if self._trustnlink is None:
495 if self._trustnlink is None:
495 self._trustnlink = nlink > 1 or util.checknlink(f)
496 self._trustnlink = nlink > 1 or util.checknlink(f)
496 if nlink > 1 or not self._trustnlink:
497 if nlink > 1 or not self._trustnlink:
497 util.rename(util.mktempcopy(f), f)
498 util.rename(util.mktempcopy(f), f)
498 fp = util.posixfile(f, mode)
499 fp = util.posixfile(f, mode)
499 if nlink == 0:
500 if nlink == 0:
500 self._fixfilemode(f)
501 self._fixfilemode(f)
501 return fp
502 return fp
502
503
503 def symlink(self, src, dst):
504 def symlink(self, src, dst):
504 self.audit(dst)
505 self.audit(dst)
505 linkname = self.join(dst)
506 linkname = self.join(dst)
506 try:
507 try:
507 os.unlink(linkname)
508 os.unlink(linkname)
508 except OSError:
509 except OSError:
509 pass
510 pass
510
511
511 util.ensuredirs(os.path.dirname(linkname), self.createmode)
512 util.ensuredirs(os.path.dirname(linkname), self.createmode)
512
513
513 if self._cansymlink:
514 if self._cansymlink:
514 try:
515 try:
515 os.symlink(src, linkname)
516 os.symlink(src, linkname)
516 except OSError, err:
517 except OSError, err:
517 raise OSError(err.errno, _('could not symlink to %r: %s') %
518 raise OSError(err.errno, _('could not symlink to %r: %s') %
518 (src, err.strerror), linkname)
519 (src, err.strerror), linkname)
519 else:
520 else:
520 self.write(dst, src)
521 self.write(dst, src)
521
522
522 def join(self, path, *insidef):
523 def join(self, path, *insidef):
523 if path:
524 if path:
524 return os.path.join(self.base, path, *insidef)
525 return os.path.join(self.base, path, *insidef)
525 else:
526 else:
526 return self.base
527 return self.base
527
528
528 opener = vfs
529 opener = vfs
529
530
class auditvfs(object):
    """Base for vfs wrappers that forward path auditing to a wrapped vfs."""

    def __init__(self, vfs):
        # the wrapped vfs that all auditing state is delegated to
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    # proxy the mustaudit flag through to the wrapped vfs
    mustaudit = property(_getmustaudit, _setmustaudit)
541
542
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        # filter: callable mapping a relative path to the path actually used
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # open files under the filtered name
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if path:
            # filter the combined relative path before resolving it
            return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
        else:
            return self.vfs.join(path)
557
558
filteropener = filtervfs  # backwards compatibility alias for the old name
559
560
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes pass through; any writable mode
        # ('w', 'a', 'r+', ...) aborts
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
570
571
571
572
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the root we were asked to scan
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; return False when a
            # directory with the same stat identity was already seen
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat (e.g. some platforms), symlink cycles cannot
        # be detected, so refuse to follow symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the symlink target as a separate tree,
                        # sharing seen_dirs to avoid revisiting cycles
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # prune already-seen directories from further descent
            dirs[:] = newdirs
619
620
def osrcpath():
    '''return default os-specific hgrc search path'''
    defaultpath = os.path.join(util.datapath, 'default.d')
    path = []
    if os.path.isdir(defaultpath):
        # bundled default configuration snippets come first
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
632
633
_rcpath = None  # lazily-computed, process-wide cache for rcpath()
634
635
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        # computed once per process and cached in the module-level _rcpath
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                # empty entries are skipped, so HGRCPATH="" disables all
                # global configuration files
                if not p:
                    continue
                p = util.expandpath(p)
                if os.path.isdir(p):
                    # a directory contributes all of its *.rc files
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = osrcpath()
    return _rcpath
658
659
def intrev(repo, rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # the working directory is represented by None; map it to the
    # virtual revision number len(repo) so it sorts after all real revs
    return len(repo) if rev is None else rev
665
666
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx.

    An empty revspec falls back to ``default`` (a literal 0 revision is
    kept). Raises util.Abort when the spec resolves to an empty set.
    """
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise util.Abort(_('empty revision set'))
    # a spec naming several revisions resolves to its last member
    return repo[l.last()]
674
675
def revpair(repo, revs):
    """Resolve a list of revision specs to a pair of nodes.

    Returns (first, second); second is None when the specs name a single
    revision, and with no specs at all the working directory's first
    parent is returned. Raises util.Abort on an empty range.
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        # on sorted sets min()/max() are cheaper than first()/last()
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a single spec without a range separator denotes a single revision
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
700
701
_revrangesep = ':'  # separator of old-style "start:end" revision ranges
702
703
def revrange(repo, revs):
    """Return a revset.baseset of revision numbers resolved from a list
    of revision specifications (old-style ranges, single revisions or
    revset queries), preserving order and dropping duplicates."""

    def revfix(repo, val, defval):
        # map an empty range endpoint to defval, keeping a literal 0
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, l = set(), revset.baseset([])

    revsetaliases = [alias for (alias, _) in
                     repo.ui.configitems("revsetalias")]

    for spec in revs:
        if l and not seen:
            # first iteration deferred building 'seen'; sync it now
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            # ... except for revset aliases without arguments. These
            # should be parsed as soon as possible, because they might
            # clash with a hash prefix.
            if spec in revsetaliases:
                raise error.RepoLookupError

            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                if start in revsetaliases or end in revsetaliases:
                    raise error.RepoLookupError

                # empty endpoints default to the full history span
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo)

    return l
776
777
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # no explicit "kind:" prefix, so the pattern may be a bare glob
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid glob syntax: fall back to the literal name
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # patterns with a kind, or globs matching nothing, pass through
        ret.append(kindpat)
    return ret
795
796
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    if pats == ("",):
        pats = []
    if default == 'relpath' and not globbed:
        # expand bare globs on platforms where the shell does not
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'), default)

    def badfn(f, msg):
        # report unmatchable files on the repository ui
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m.bad = badfn
    if m.always():
        pats = []
    return m, pats
812
813
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats, discarding the resolved pattern list
    m, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return m
816
817
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # an always-matcher lets callers skip per-file pattern checks
    return matchmod.always(repo.root, repo.getcwd())
820
821
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    # an exact matcher compares file names directly, no patterns involved
    return matchmod.exact(repo.root, repo.getcwd(), files)
824
825
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    """Add new files, forget missing ones and record renames by similarity.

    Returns 1 when any explicitly requested file could not be processed
    (including failures inside subrepositories), 0 otherwise.
    """
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # does the matcher name the subrepo itself or a file inside it?
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            # NOTE(review): a plain startswith also matches sibling paths
            # that merely share a prefix (e.g. 'sub2' vs 'sub') — confirm
            # whether a path-boundary check is intended here
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    origbad = m.bad
    def badfn(f, msg):
        # only warn for files the user listed explicitly
        if f in m.files():
            origbad(f, msg)
        rejected.append(f)

    # collect bad files during the walk, then restore the handler
    m.bad = badfn
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    m.bad = origbad

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
887
888
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files)
    rejected = []
    # silently collect files the matcher rejects; reported at the end
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # unlike addremove, this is never a dry run
    _markchanges(repo, unknown + forgotten, deleted, renames)

    # 1 signals that at least one requested file could not be handled
    for f in rejected:
        if f in m.files():
            return 1
    return 0
917
918
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # dirstate codes used below: '?' untracked, 'r' removed, 'a' added;
        # st is the stat result, falsy when the file is absent on disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked file at an auditable path
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing from the filesystem
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet still present on disk
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
946
947
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        # similarity 0 disables rename detection entirely
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        exact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not exact:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
961
962
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    lock = repo.wlock()
    try:
        # forget before add so a file in both lists ends up added
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
    finally:
        lock.release()
974
975
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its true origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # the round-trip cancels out: make dst a plain tracked file
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                # source is uncommitted: add dst without copy metadata
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
993
994
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # entries must start with an alphanumeric character; anything
        # else (including empty lines) means the file is damaged
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1012
1013
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    Used by filecacheentry/filecache to decide whether a cached object
    derived from the file must be recreated.
    """

    def __init__(self, path, stat):
        self.path = path
        # last recorded stat data, or None when never stat'ed / missing
        self.cachestat = None
        # whether stat data for this path is trustworthy;
        # None means not determined yet
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # refreshing only makes sense when stat data can be trusted
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist;
        # any other OS error is propagated to the caller
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
1067
1068
class filecacheentry(object):
    """Aggregate of filecachesubentry trackers, one per watched path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, matching the
        # early-return behavior of an explicit loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1084
1085
1085 class filecache(object):
1086 class filecache(object):
1086 '''A property like decorator that tracks files under .hg/ for updates.
1087 '''A property like decorator that tracks files under .hg/ for updates.
1087
1088
1088 Records stat info when called in _filecache.
1089 Records stat info when called in _filecache.
1089
1090
1090 On subsequent calls, compares old stat info with new info, and recreates the
1091 On subsequent calls, compares old stat info with new info, and recreates the
1091 object when any of the files changes, updating the new stat info in
1092 object when any of the files changes, updating the new stat info in
1092 _filecache.
1093 _filecache.
1093
1094
1094 Mercurial either atomic renames or appends for files under .hg,
1095 Mercurial either atomic renames or appends for files under .hg,
1095 so to ensure the cache is reliable we need the filesystem to be able
1096 so to ensure the cache is reliable we need the filesystem to be able
1096 to tell us if a file has been replaced. If it can't, we fallback to
1097 to tell us if a file has been replaced. If it can't, we fallback to
1097 recreating the object on every call (essentially the same behaviour as
1098 recreating the object on every call (essentially the same behaviour as
1098 propertycache).
1099 propertycache).
1099
1100
1100 '''
1101 '''
1101 def __init__(self, *paths):
1102 def __init__(self, *paths):
1102 self.paths = paths
1103 self.paths = paths
1103
1104
1104 def join(self, obj, fname):
1105 def join(self, obj, fname):
1105 """Used to compute the runtime path of a cached file.
1106 """Used to compute the runtime path of a cached file.
1106
1107
1107 Users should subclass filecache and provide their own version of this
1108 Users should subclass filecache and provide their own version of this
1108 function to call the appropriate join function on 'obj' (an instance
1109 function to call the appropriate join function on 'obj' (an instance
1109 of the class that its member function was decorated).
1110 of the class that its member function was decorated).
1110 """
1111 """
1111 return obj.join(fname)
1112 return obj.join(fname)
1112
1113
1113 def __call__(self, func):
1114 def __call__(self, func):
1114 self.func = func
1115 self.func = func
1115 self.name = func.__name__
1116 self.name = func.__name__
1116 return self
1117 return self
1117
1118
1118 def __get__(self, obj, type=None):
1119 def __get__(self, obj, type=None):
1119 # do we need to check if the file changed?
1120 # do we need to check if the file changed?
1120 if self.name in obj.__dict__:
1121 if self.name in obj.__dict__:
1121 assert self.name in obj._filecache, self.name
1122 assert self.name in obj._filecache, self.name
1122 return obj.__dict__[self.name]
1123 return obj.__dict__[self.name]
1123
1124
1124 entry = obj._filecache.get(self.name)
1125 entry = obj._filecache.get(self.name)
1125
1126
1126 if entry:
1127 if entry:
1127 if entry.changed():
1128 if entry.changed():
1128 entry.obj = self.func(obj)
1129 entry.obj = self.func(obj)
1129 else:
1130 else:
1130 paths = [self.join(obj, path) for path in self.paths]
1131 paths = [self.join(obj, path) for path in self.paths]
1131
1132
1132 # We stat -before- creating the object so our cache doesn't lie if
1133 # We stat -before- creating the object so our cache doesn't lie if
1133 # a writer modified between the time we read and stat
1134 # a writer modified between the time we read and stat
1134 entry = filecacheentry(paths, True)
1135 entry = filecacheentry(paths, True)
1135 entry.obj = self.func(obj)
1136 entry.obj = self.func(obj)
1136
1137
1137 obj._filecache[self.name] = entry
1138 obj._filecache[self.name] = entry
1138
1139
1139 obj.__dict__[self.name] = entry.obj
1140 obj.__dict__[self.name] = entry.obj
1140 return entry.obj
1141 return entry.obj
1141
1142
1142 def __set__(self, obj, value):
1143 def __set__(self, obj, value):
1143 if self.name not in obj._filecache:
1144 if self.name not in obj._filecache:
1144 # we add an entry for the missing value because X in __dict__
1145 # we add an entry for the missing value because X in __dict__
1145 # implies X in _filecache
1146 # implies X in _filecache
1146 paths = [self.join(obj, path) for path in self.paths]
1147 paths = [self.join(obj, path) for path in self.paths]
1147 ce = filecacheentry(paths, False)
1148 ce = filecacheentry(paths, False)
1148 obj._filecache[self.name] = ce
1149 obj._filecache[self.name] = ce
1149 else:
1150 else:
1150 ce = obj._filecache[self.name]
1151 ce = obj._filecache[self.name]
1151
1152
1152 ce.obj = value # update cached copy
1153 ce.obj = value # update cached copy
1153 obj.__dict__[self.name] = value # update copy returned by obj.x
1154 obj.__dict__[self.name] = value # update copy returned by obj.x
1154
1155
1155 def __delete__(self, obj):
1156 def __delete__(self, obj):
1156 try:
1157 try:
1157 del obj.__dict__[self.name]
1158 del obj.__dict__[self.name]
1158 except KeyError:
1159 except KeyError:
1159 raise AttributeError(self.name)
1160 raise AttributeError(self.name)
@@ -1,90 +1,90
1
1
2 $ cat << EOF > buggylocking.py
2 $ cat << EOF > buggylocking.py
3 > """A small extension that acquire locks in the wrong order
3 > """A small extension that acquire locks in the wrong order
4 > """
4 > """
5 >
5 >
6 > from mercurial import cmdutil
6 > from mercurial import cmdutil
7 >
7 >
8 > cmdtable = {}
8 > cmdtable = {}
9 > command = cmdutil.command(cmdtable)
9 > command = cmdutil.command(cmdtable)
10 >
10 >
11 > @command('buggylocking', [], '')
11 > @command('buggylocking', [], '')
12 > def buggylocking(ui, repo):
12 > def buggylocking(ui, repo):
13 > tr = repo.transaction('buggy')
13 > tr = repo.transaction('buggy')
14 > lo = repo.lock()
14 > lo = repo.lock()
15 > wl = repo.wlock()
15 > wl = repo.wlock()
16 > wl.release()
16 > wl.release()
17 > lo.release()
17 > lo.release()
18 >
18 >
19 > @command('properlocking', [], '')
19 > @command('properlocking', [], '')
20 > def properlocking(ui, repo):
20 > def properlocking(ui, repo):
21 > """check that reentrance is fine"""
21 > """check that reentrance is fine"""
22 > wl = repo.wlock()
22 > wl = repo.wlock()
23 > lo = repo.lock()
23 > lo = repo.lock()
24 > tr = repo.transaction('proper')
24 > tr = repo.transaction('proper')
25 > tr2 = repo.transaction('proper')
25 > tr2 = repo.transaction('proper')
26 > lo2 = repo.lock()
26 > lo2 = repo.lock()
27 > wl2 = repo.wlock()
27 > wl2 = repo.wlock()
28 > wl2.release()
28 > wl2.release()
29 > lo2.release()
29 > lo2.release()
30 > tr2.close()
30 > tr2.close()
31 > tr.close()
31 > tr.close()
32 > lo.release()
32 > lo.release()
33 > wl.release()
33 > wl.release()
34 >
34 >
35 > @command('nowaitlocking', [], '')
35 > @command('nowaitlocking', [], '')
36 > def nowaitlocking(ui, repo):
36 > def nowaitlocking(ui, repo):
37 > lo = repo.lock()
37 > lo = repo.lock()
38 > wl = repo.wlock(wait=False)
38 > wl = repo.wlock(wait=False)
39 > wl.release()
39 > wl.release()
40 > lo.release()
40 > lo.release()
41 > EOF
41 > EOF
42
42
43 $ cat << EOF >> $HGRCPATH
43 $ cat << EOF >> $HGRCPATH
44 > [extensions]
44 > [extensions]
45 > buggylocking=$TESTTMP/buggylocking.py
45 > buggylocking=$TESTTMP/buggylocking.py
46 > [devel]
46 > [devel]
47 > all=1
47 > all=1
48 > EOF
48 > EOF
49
49
50 $ hg init lock-checker
50 $ hg init lock-checker
51 $ cd lock-checker
51 $ cd lock-checker
52 $ hg buggylocking
52 $ hg buggylocking
53 transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
53 devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
54 "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
54 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
55 $ cat << EOF >> $HGRCPATH
55 $ cat << EOF >> $HGRCPATH
56 > [devel]
56 > [devel]
57 > all=0
57 > all=0
58 > check-locks=1
58 > check-locks=1
59 > EOF
59 > EOF
60 $ hg buggylocking
60 $ hg buggylocking
61 transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
61 devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
62 "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
62 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
63 $ hg buggylocking --traceback
63 $ hg buggylocking --traceback
64 transaction with no lock at:
64 devel-warn: transaction with no lock at:
65 */hg:* in * (glob)
65 */hg:* in * (glob)
66 */mercurial/dispatch.py:* in run (glob)
66 */mercurial/dispatch.py:* in run (glob)
67 */mercurial/dispatch.py:* in dispatch (glob)
67 */mercurial/dispatch.py:* in dispatch (glob)
68 */mercurial/dispatch.py:* in _runcatch (glob)
68 */mercurial/dispatch.py:* in _runcatch (glob)
69 */mercurial/dispatch.py:* in _dispatch (glob)
69 */mercurial/dispatch.py:* in _dispatch (glob)
70 */mercurial/dispatch.py:* in runcommand (glob)
70 */mercurial/dispatch.py:* in runcommand (glob)
71 */mercurial/dispatch.py:* in _runcommand (glob)
71 */mercurial/dispatch.py:* in _runcommand (glob)
72 */mercurial/dispatch.py:* in checkargs (glob)
72 */mercurial/dispatch.py:* in checkargs (glob)
73 */mercurial/dispatch.py:* in <lambda> (glob)
73 */mercurial/dispatch.py:* in <lambda> (glob)
74 */mercurial/util.py:* in check (glob)
74 */mercurial/util.py:* in check (glob)
75 $TESTTMP/buggylocking.py:* in buggylocking (glob)
75 $TESTTMP/buggylocking.py:* in buggylocking (glob)
76 "wlock" acquired after "lock" at:
76 devel-warn: "wlock" acquired after "lock" at:
77 */hg:* in * (glob)
77 */hg:* in * (glob)
78 */mercurial/dispatch.py:* in run (glob)
78 */mercurial/dispatch.py:* in run (glob)
79 */mercurial/dispatch.py:* in dispatch (glob)
79 */mercurial/dispatch.py:* in dispatch (glob)
80 */mercurial/dispatch.py:* in _runcatch (glob)
80 */mercurial/dispatch.py:* in _runcatch (glob)
81 */mercurial/dispatch.py:* in _dispatch (glob)
81 */mercurial/dispatch.py:* in _dispatch (glob)
82 */mercurial/dispatch.py:* in runcommand (glob)
82 */mercurial/dispatch.py:* in runcommand (glob)
83 */mercurial/dispatch.py:* in _runcommand (glob)
83 */mercurial/dispatch.py:* in _runcommand (glob)
84 */mercurial/dispatch.py:* in checkargs (glob)
84 */mercurial/dispatch.py:* in checkargs (glob)
85 */mercurial/dispatch.py:* in <lambda> (glob)
85 */mercurial/dispatch.py:* in <lambda> (glob)
86 */mercurial/util.py:* in check (glob)
86 */mercurial/util.py:* in check (glob)
87 $TESTTMP/buggylocking.py:* in buggylocking (glob)
87 $TESTTMP/buggylocking.py:* in buggylocking (glob)
88 $ hg properlocking
88 $ hg properlocking
89 $ hg nowaitlocking
89 $ hg nowaitlocking
90 $ cd ..
90 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now