scmutil: use util.queue/util.empty for py3 compat
timeless
r28819:826d457d default
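
This changeset drops the module-level "import Queue" from scmutil.py and points its callers at the util.queue/util.empty aliases instead. Those aliases are defined in mercurial/util.py, which is not part of the hunk shown here, so the snippet below is only a minimal sketch of what such a py2/py3 shim typically looks like; the names and layout are assumptions, not the changeset's actual code.

    # Hypothetical sketch of a util.queue/util.empty compatibility shim.
    # The real definitions live in mercurial/util.py and may differ.
    try:
        import queue as _queue    # Python 3 module name
    except ImportError:
        import Queue as _queue    # Python 2 module name
    empty = _queue.Empty   # so callers can write: except util.empty:
    queue = _queue.Queue   # so callers can write: q = util.queue(maxsize=n)

With aliases like these in place, code elsewhere in scmutil.py (for example the background file closer, which lies outside the lines shown below) can presumably refer to util.queue(...) and util.empty, which is why the top-level Queue import removed in this hunk is no longer needed.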
@@ -1,1379 +1,1378 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import Queue
11 import contextlib
10 import contextlib
12 import errno
11 import errno
13 import glob
12 import glob
14 import os
13 import os
15 import re
14 import re
16 import shutil
15 import shutil
17 import stat
16 import stat
18 import tempfile
17 import tempfile
19 import threading
18 import threading
20
19
21 from .i18n import _
20 from .i18n import _
22 from .node import wdirrev
21 from .node import wdirrev
23 from . import (
22 from . import (
24 encoding,
23 encoding,
25 error,
24 error,
26 match as matchmod,
25 match as matchmod,
27 osutil,
26 osutil,
28 pathutil,
27 pathutil,
29 phases,
28 phases,
30 revset,
29 revset,
31 similar,
30 similar,
32 util,
31 util,
33 )
32 )
34
33
35 if os.name == 'nt':
34 if os.name == 'nt':
36 from . import scmwindows as scmplatform
35 from . import scmwindows as scmplatform
37 else:
36 else:
38 from . import scmposix as scmplatform
37 from . import scmposix as scmplatform
39
38
40 systemrcpath = scmplatform.systemrcpath
39 systemrcpath = scmplatform.systemrcpath
41 userrcpath = scmplatform.userrcpath
40 userrcpath = scmplatform.userrcpath
42
41
43 class status(tuple):
42 class status(tuple):
44 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
43 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
45 and 'ignored' properties are only relevant to the working copy.
44 and 'ignored' properties are only relevant to the working copy.
46 '''
45 '''
47
46
48 __slots__ = ()
47 __slots__ = ()
49
48
50 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
49 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
51 clean):
50 clean):
52 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
51 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
53 ignored, clean))
52 ignored, clean))
54
53
55 @property
54 @property
56 def modified(self):
55 def modified(self):
57 '''files that have been modified'''
56 '''files that have been modified'''
58 return self[0]
57 return self[0]
59
58
60 @property
59 @property
61 def added(self):
60 def added(self):
62 '''files that have been added'''
61 '''files that have been added'''
63 return self[1]
62 return self[1]
64
63
65 @property
64 @property
66 def removed(self):
65 def removed(self):
67 '''files that have been removed'''
66 '''files that have been removed'''
68 return self[2]
67 return self[2]
69
68
70 @property
69 @property
71 def deleted(self):
70 def deleted(self):
72 '''files that are in the dirstate, but have been deleted from the
71 '''files that are in the dirstate, but have been deleted from the
73 working copy (aka "missing")
72 working copy (aka "missing")
74 '''
73 '''
75 return self[3]
74 return self[3]
76
75
77 @property
76 @property
78 def unknown(self):
77 def unknown(self):
79 '''files not in the dirstate that are not ignored'''
78 '''files not in the dirstate that are not ignored'''
80 return self[4]
79 return self[4]
81
80
82 @property
81 @property
83 def ignored(self):
82 def ignored(self):
84 '''files not in the dirstate that are ignored (by _dirignore())'''
83 '''files not in the dirstate that are ignored (by _dirignore())'''
85 return self[5]
84 return self[5]
86
85
87 @property
86 @property
88 def clean(self):
87 def clean(self):
89 '''files that have not been modified'''
88 '''files that have not been modified'''
90 return self[6]
89 return self[6]
91
90
92 def __repr__(self, *args, **kwargs):
91 def __repr__(self, *args, **kwargs):
93 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
92 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
94 'unknown=%r, ignored=%r, clean=%r>') % self)
93 'unknown=%r, ignored=%r, clean=%r>') % self)
95
94
96 def itersubrepos(ctx1, ctx2):
95 def itersubrepos(ctx1, ctx2):
97 """find subrepos in ctx1 or ctx2"""
96 """find subrepos in ctx1 or ctx2"""
98 # Create a (subpath, ctx) mapping where we prefer subpaths from
97 # Create a (subpath, ctx) mapping where we prefer subpaths from
99 # ctx1. The subpaths from ctx2 are important when the .hgsub file
98 # ctx1. The subpaths from ctx2 are important when the .hgsub file
100 # has been modified (in ctx2) but not yet committed (in ctx1).
99 # has been modified (in ctx2) but not yet committed (in ctx1).
101 subpaths = dict.fromkeys(ctx2.substate, ctx2)
100 subpaths = dict.fromkeys(ctx2.substate, ctx2)
102 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
101 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
103
102
104 missing = set()
103 missing = set()
105
104
106 for subpath in ctx2.substate:
105 for subpath in ctx2.substate:
107 if subpath not in ctx1.substate:
106 if subpath not in ctx1.substate:
108 del subpaths[subpath]
107 del subpaths[subpath]
109 missing.add(subpath)
108 missing.add(subpath)
110
109
111 for subpath, ctx in sorted(subpaths.iteritems()):
110 for subpath, ctx in sorted(subpaths.iteritems()):
112 yield subpath, ctx.sub(subpath)
111 yield subpath, ctx.sub(subpath)
113
112
114 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
113 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
115 # status and diff will have an accurate result when it does
114 # status and diff will have an accurate result when it does
116 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
115 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
117 # against itself.
116 # against itself.
118 for subpath in missing:
117 for subpath in missing:
119 yield subpath, ctx2.nullsub(subpath, ctx1)
118 yield subpath, ctx2.nullsub(subpath, ctx1)
120
119
121 def nochangesfound(ui, repo, excluded=None):
120 def nochangesfound(ui, repo, excluded=None):
122 '''Report no changes for push/pull, excluded is None or a list of
121 '''Report no changes for push/pull, excluded is None or a list of
123 nodes excluded from the push/pull.
122 nodes excluded from the push/pull.
124 '''
123 '''
125 secretlist = []
124 secretlist = []
126 if excluded:
125 if excluded:
127 for n in excluded:
126 for n in excluded:
128 if n not in repo:
127 if n not in repo:
129 # discovery should not have included the filtered revision,
128 # discovery should not have included the filtered revision,
130 # we have to explicitly exclude it until discovery is cleanup.
129 # we have to explicitly exclude it until discovery is cleanup.
131 continue
130 continue
132 ctx = repo[n]
131 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
132 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
133 secretlist.append(n)
135
134
136 if secretlist:
135 if secretlist:
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
136 ui.status(_("no changes found (ignored %d secret changesets)\n")
138 % len(secretlist))
137 % len(secretlist))
139 else:
138 else:
140 ui.status(_("no changes found\n"))
139 ui.status(_("no changes found\n"))
141
140
142 def checknewlabel(repo, lbl, kind):
141 def checknewlabel(repo, lbl, kind):
143 # Do not use the "kind" parameter in ui output.
142 # Do not use the "kind" parameter in ui output.
144 # It makes strings difficult to translate.
143 # It makes strings difficult to translate.
145 if lbl in ['tip', '.', 'null']:
144 if lbl in ['tip', '.', 'null']:
146 raise error.Abort(_("the name '%s' is reserved") % lbl)
145 raise error.Abort(_("the name '%s' is reserved") % lbl)
147 for c in (':', '\0', '\n', '\r'):
146 for c in (':', '\0', '\n', '\r'):
148 if c in lbl:
147 if c in lbl:
149 raise error.Abort(_("%r cannot be used in a name") % c)
148 raise error.Abort(_("%r cannot be used in a name") % c)
150 try:
149 try:
151 int(lbl)
150 int(lbl)
152 raise error.Abort(_("cannot use an integer as a name"))
151 raise error.Abort(_("cannot use an integer as a name"))
153 except ValueError:
152 except ValueError:
154 pass
153 pass
155
154
156 def checkfilename(f):
155 def checkfilename(f):
157 '''Check that the filename f is an acceptable filename for a tracked file'''
156 '''Check that the filename f is an acceptable filename for a tracked file'''
158 if '\r' in f or '\n' in f:
157 if '\r' in f or '\n' in f:
159 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
158 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
160
159
161 def checkportable(ui, f):
160 def checkportable(ui, f):
162 '''Check if filename f is portable and warn or abort depending on config'''
161 '''Check if filename f is portable and warn or abort depending on config'''
163 checkfilename(f)
162 checkfilename(f)
164 abort, warn = checkportabilityalert(ui)
163 abort, warn = checkportabilityalert(ui)
165 if abort or warn:
164 if abort or warn:
166 msg = util.checkwinfilename(f)
165 msg = util.checkwinfilename(f)
167 if msg:
166 if msg:
168 msg = "%s: %r" % (msg, f)
167 msg = "%s: %r" % (msg, f)
169 if abort:
168 if abort:
170 raise error.Abort(msg)
169 raise error.Abort(msg)
171 ui.warn(_("warning: %s\n") % msg)
170 ui.warn(_("warning: %s\n") % msg)
172
171
173 def checkportabilityalert(ui):
172 def checkportabilityalert(ui):
174 '''check if the user's config requests nothing, a warning, or abort for
173 '''check if the user's config requests nothing, a warning, or abort for
175 non-portable filenames'''
174 non-portable filenames'''
176 val = ui.config('ui', 'portablefilenames', 'warn')
175 val = ui.config('ui', 'portablefilenames', 'warn')
177 lval = val.lower()
176 lval = val.lower()
178 bval = util.parsebool(val)
177 bval = util.parsebool(val)
179 abort = os.name == 'nt' or lval == 'abort'
178 abort = os.name == 'nt' or lval == 'abort'
180 warn = bval or lval == 'warn'
179 warn = bval or lval == 'warn'
181 if bval is None and not (warn or abort or lval == 'ignore'):
180 if bval is None and not (warn or abort or lval == 'ignore'):
182 raise error.ConfigError(
181 raise error.ConfigError(
183 _("ui.portablefilenames value is invalid ('%s')") % val)
182 _("ui.portablefilenames value is invalid ('%s')") % val)
184 return abort, warn
183 return abort, warn
185
184
186 class casecollisionauditor(object):
185 class casecollisionauditor(object):
187 def __init__(self, ui, abort, dirstate):
186 def __init__(self, ui, abort, dirstate):
188 self._ui = ui
187 self._ui = ui
189 self._abort = abort
188 self._abort = abort
190 allfiles = '\0'.join(dirstate._map)
189 allfiles = '\0'.join(dirstate._map)
191 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
190 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
192 self._dirstate = dirstate
191 self._dirstate = dirstate
193 # The purpose of _newfiles is so that we don't complain about
192 # The purpose of _newfiles is so that we don't complain about
194 # case collisions if someone were to call this object with the
193 # case collisions if someone were to call this object with the
195 # same filename twice.
194 # same filename twice.
196 self._newfiles = set()
195 self._newfiles = set()
197
196
198 def __call__(self, f):
197 def __call__(self, f):
199 if f in self._newfiles:
198 if f in self._newfiles:
200 return
199 return
201 fl = encoding.lower(f)
200 fl = encoding.lower(f)
202 if fl in self._loweredfiles and f not in self._dirstate:
201 if fl in self._loweredfiles and f not in self._dirstate:
203 msg = _('possible case-folding collision for %s') % f
202 msg = _('possible case-folding collision for %s') % f
204 if self._abort:
203 if self._abort:
205 raise error.Abort(msg)
204 raise error.Abort(msg)
206 self._ui.warn(_("warning: %s\n") % msg)
205 self._ui.warn(_("warning: %s\n") % msg)
207 self._loweredfiles.add(fl)
206 self._loweredfiles.add(fl)
208 self._newfiles.add(f)
207 self._newfiles.add(f)
209
208
210 def filteredhash(repo, maxrev):
209 def filteredhash(repo, maxrev):
211 """build hash of filtered revisions in the current repoview.
210 """build hash of filtered revisions in the current repoview.
212
211
213 Multiple caches perform up-to-date validation by checking that the
212 Multiple caches perform up-to-date validation by checking that the
214 tiprev and tipnode stored in the cache file match the current repository.
213 tiprev and tipnode stored in the cache file match the current repository.
215 However, this is not sufficient for validating repoviews because the set
214 However, this is not sufficient for validating repoviews because the set
216 of revisions in the view may change without the repository tiprev and
215 of revisions in the view may change without the repository tiprev and
217 tipnode changing.
216 tipnode changing.
218
217
219 This function hashes all the revs filtered from the view and returns
218 This function hashes all the revs filtered from the view and returns
220 that SHA-1 digest.
219 that SHA-1 digest.
221 """
220 """
222 cl = repo.changelog
221 cl = repo.changelog
223 if not cl.filteredrevs:
222 if not cl.filteredrevs:
224 return None
223 return None
225 key = None
224 key = None
226 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
225 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
227 if revs:
226 if revs:
228 s = util.sha1()
227 s = util.sha1()
229 for rev in revs:
228 for rev in revs:
230 s.update('%s;' % rev)
229 s.update('%s;' % rev)
231 key = s.digest()
230 key = s.digest()
232 return key
231 return key
233
232
234 class abstractvfs(object):
233 class abstractvfs(object):
235 """Abstract base class; cannot be instantiated"""
234 """Abstract base class; cannot be instantiated"""
236
235
237 def __init__(self, *args, **kwargs):
236 def __init__(self, *args, **kwargs):
238 '''Prevent instantiation; don't call this from subclasses.'''
237 '''Prevent instantiation; don't call this from subclasses.'''
239 raise NotImplementedError('attempted instantiating ' + str(type(self)))
238 raise NotImplementedError('attempted instantiating ' + str(type(self)))
240
239
241 def tryread(self, path):
240 def tryread(self, path):
242 '''gracefully return an empty string for missing files'''
241 '''gracefully return an empty string for missing files'''
243 try:
242 try:
244 return self.read(path)
243 return self.read(path)
245 except IOError as inst:
244 except IOError as inst:
246 if inst.errno != errno.ENOENT:
245 if inst.errno != errno.ENOENT:
247 raise
246 raise
248 return ""
247 return ""
249
248
250 def tryreadlines(self, path, mode='rb'):
249 def tryreadlines(self, path, mode='rb'):
251 '''gracefully return an empty array for missing files'''
250 '''gracefully return an empty array for missing files'''
252 try:
251 try:
253 return self.readlines(path, mode=mode)
252 return self.readlines(path, mode=mode)
254 except IOError as inst:
253 except IOError as inst:
255 if inst.errno != errno.ENOENT:
254 if inst.errno != errno.ENOENT:
256 raise
255 raise
257 return []
256 return []
258
257
259 def open(self, path, mode="r", text=False, atomictemp=False,
258 def open(self, path, mode="r", text=False, atomictemp=False,
260 notindexed=False, backgroundclose=False):
259 notindexed=False, backgroundclose=False):
261 '''Open ``path`` file, which is relative to vfs root.
260 '''Open ``path`` file, which is relative to vfs root.
262
261
263 Newly created directories are marked as "not to be indexed by
262 Newly created directories are marked as "not to be indexed by
264 the content indexing service", if ``notindexed`` is specified
263 the content indexing service", if ``notindexed`` is specified
265 for "write" mode access.
264 for "write" mode access.
266 '''
265 '''
267 self.open = self.__call__
266 self.open = self.__call__
268 return self.__call__(path, mode, text, atomictemp, notindexed,
267 return self.__call__(path, mode, text, atomictemp, notindexed,
269 backgroundclose=backgroundclose)
268 backgroundclose=backgroundclose)
270
269
271 def read(self, path):
270 def read(self, path):
272 with self(path, 'rb') as fp:
271 with self(path, 'rb') as fp:
273 return fp.read()
272 return fp.read()
274
273
275 def readlines(self, path, mode='rb'):
274 def readlines(self, path, mode='rb'):
276 with self(path, mode=mode) as fp:
275 with self(path, mode=mode) as fp:
277 return fp.readlines()
276 return fp.readlines()
278
277
279 def write(self, path, data, backgroundclose=False):
278 def write(self, path, data, backgroundclose=False):
280 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
279 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
281 return fp.write(data)
280 return fp.write(data)
282
281
283 def writelines(self, path, data, mode='wb', notindexed=False):
282 def writelines(self, path, data, mode='wb', notindexed=False):
284 with self(path, mode=mode, notindexed=notindexed) as fp:
283 with self(path, mode=mode, notindexed=notindexed) as fp:
285 return fp.writelines(data)
284 return fp.writelines(data)
286
285
287 def append(self, path, data):
286 def append(self, path, data):
288 with self(path, 'ab') as fp:
287 with self(path, 'ab') as fp:
289 return fp.write(data)
288 return fp.write(data)
290
289
291 def basename(self, path):
290 def basename(self, path):
292 """return base element of a path (as os.path.basename would do)
291 """return base element of a path (as os.path.basename would do)
293
292
294 This exists to allow handling of strange encoding if needed."""
293 This exists to allow handling of strange encoding if needed."""
295 return os.path.basename(path)
294 return os.path.basename(path)
296
295
297 def chmod(self, path, mode):
296 def chmod(self, path, mode):
298 return os.chmod(self.join(path), mode)
297 return os.chmod(self.join(path), mode)
299
298
300 def dirname(self, path):
299 def dirname(self, path):
301 """return dirname element of a path (as os.path.dirname would do)
300 """return dirname element of a path (as os.path.dirname would do)
302
301
303 This exists to allow handling of strange encoding if needed."""
302 This exists to allow handling of strange encoding if needed."""
304 return os.path.dirname(path)
303 return os.path.dirname(path)
305
304
306 def exists(self, path=None):
305 def exists(self, path=None):
307 return os.path.exists(self.join(path))
306 return os.path.exists(self.join(path))
308
307
309 def fstat(self, fp):
308 def fstat(self, fp):
310 return util.fstat(fp)
309 return util.fstat(fp)
311
310
312 def isdir(self, path=None):
311 def isdir(self, path=None):
313 return os.path.isdir(self.join(path))
312 return os.path.isdir(self.join(path))
314
313
315 def isfile(self, path=None):
314 def isfile(self, path=None):
316 return os.path.isfile(self.join(path))
315 return os.path.isfile(self.join(path))
317
316
318 def islink(self, path=None):
317 def islink(self, path=None):
319 return os.path.islink(self.join(path))
318 return os.path.islink(self.join(path))
320
319
321 def isfileorlink(self, path=None):
320 def isfileorlink(self, path=None):
322 '''return whether path is a regular file or a symlink
321 '''return whether path is a regular file or a symlink
323
322
324 Unlike isfile, this doesn't follow symlinks.'''
323 Unlike isfile, this doesn't follow symlinks.'''
325 try:
324 try:
326 st = self.lstat(path)
325 st = self.lstat(path)
327 except OSError:
326 except OSError:
328 return False
327 return False
329 mode = st.st_mode
328 mode = st.st_mode
330 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
329 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
331
330
332 def reljoin(self, *paths):
331 def reljoin(self, *paths):
333 """join various elements of a path together (as os.path.join would do)
332 """join various elements of a path together (as os.path.join would do)
334
333
335 The vfs base is not injected so that path stay relative. This exists
334 The vfs base is not injected so that path stay relative. This exists
336 to allow handling of strange encoding if needed."""
335 to allow handling of strange encoding if needed."""
337 return os.path.join(*paths)
336 return os.path.join(*paths)
338
337
339 def split(self, path):
338 def split(self, path):
340 """split top-most element of a path (as os.path.split would do)
339 """split top-most element of a path (as os.path.split would do)
341
340
342 This exists to allow handling of strange encoding if needed."""
341 This exists to allow handling of strange encoding if needed."""
343 return os.path.split(path)
342 return os.path.split(path)
344
343
345 def lexists(self, path=None):
344 def lexists(self, path=None):
346 return os.path.lexists(self.join(path))
345 return os.path.lexists(self.join(path))
347
346
348 def lstat(self, path=None):
347 def lstat(self, path=None):
349 return os.lstat(self.join(path))
348 return os.lstat(self.join(path))
350
349
351 def listdir(self, path=None):
350 def listdir(self, path=None):
352 return os.listdir(self.join(path))
351 return os.listdir(self.join(path))
353
352
354 def makedir(self, path=None, notindexed=True):
353 def makedir(self, path=None, notindexed=True):
355 return util.makedir(self.join(path), notindexed)
354 return util.makedir(self.join(path), notindexed)
356
355
357 def makedirs(self, path=None, mode=None):
356 def makedirs(self, path=None, mode=None):
358 return util.makedirs(self.join(path), mode)
357 return util.makedirs(self.join(path), mode)
359
358
360 def makelock(self, info, path):
359 def makelock(self, info, path):
361 return util.makelock(info, self.join(path))
360 return util.makelock(info, self.join(path))
362
361
363 def mkdir(self, path=None):
362 def mkdir(self, path=None):
364 return os.mkdir(self.join(path))
363 return os.mkdir(self.join(path))
365
364
366 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
365 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
367 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
366 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
368 dir=self.join(dir), text=text)
367 dir=self.join(dir), text=text)
369 dname, fname = util.split(name)
368 dname, fname = util.split(name)
370 if dir:
369 if dir:
371 return fd, os.path.join(dir, fname)
370 return fd, os.path.join(dir, fname)
372 else:
371 else:
373 return fd, fname
372 return fd, fname
374
373
375 def readdir(self, path=None, stat=None, skip=None):
374 def readdir(self, path=None, stat=None, skip=None):
376 return osutil.listdir(self.join(path), stat, skip)
375 return osutil.listdir(self.join(path), stat, skip)
377
376
378 def readlock(self, path):
377 def readlock(self, path):
379 return util.readlock(self.join(path))
378 return util.readlock(self.join(path))
380
379
381 def rename(self, src, dst):
380 def rename(self, src, dst):
382 return util.rename(self.join(src), self.join(dst))
381 return util.rename(self.join(src), self.join(dst))
383
382
384 def readlink(self, path):
383 def readlink(self, path):
385 return os.readlink(self.join(path))
384 return os.readlink(self.join(path))
386
385
387 def removedirs(self, path=None):
386 def removedirs(self, path=None):
388 """Remove a leaf directory and all empty intermediate ones
387 """Remove a leaf directory and all empty intermediate ones
389 """
388 """
390 return util.removedirs(self.join(path))
389 return util.removedirs(self.join(path))
391
390
392 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
391 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
393 """Remove a directory tree recursively
392 """Remove a directory tree recursively
394
393
395 If ``forcibly``, this tries to remove READ-ONLY files, too.
394 If ``forcibly``, this tries to remove READ-ONLY files, too.
396 """
395 """
397 if forcibly:
396 if forcibly:
398 def onerror(function, path, excinfo):
397 def onerror(function, path, excinfo):
399 if function is not os.remove:
398 if function is not os.remove:
400 raise
399 raise
401 # read-only files cannot be unlinked under Windows
400 # read-only files cannot be unlinked under Windows
402 s = os.stat(path)
401 s = os.stat(path)
403 if (s.st_mode & stat.S_IWRITE) != 0:
402 if (s.st_mode & stat.S_IWRITE) != 0:
404 raise
403 raise
405 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
404 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
406 os.remove(path)
405 os.remove(path)
407 else:
406 else:
408 onerror = None
407 onerror = None
409 return shutil.rmtree(self.join(path),
408 return shutil.rmtree(self.join(path),
410 ignore_errors=ignore_errors, onerror=onerror)
409 ignore_errors=ignore_errors, onerror=onerror)
411
410
412 def setflags(self, path, l, x):
411 def setflags(self, path, l, x):
413 return util.setflags(self.join(path), l, x)
412 return util.setflags(self.join(path), l, x)
414
413
415 def stat(self, path=None):
414 def stat(self, path=None):
416 return os.stat(self.join(path))
415 return os.stat(self.join(path))
417
416
418 def unlink(self, path=None):
417 def unlink(self, path=None):
419 return util.unlink(self.join(path))
418 return util.unlink(self.join(path))
420
419
421 def unlinkpath(self, path=None, ignoremissing=False):
420 def unlinkpath(self, path=None, ignoremissing=False):
422 return util.unlinkpath(self.join(path), ignoremissing)
421 return util.unlinkpath(self.join(path), ignoremissing)
423
422
424 def utime(self, path=None, t=None):
423 def utime(self, path=None, t=None):
425 return os.utime(self.join(path), t)
424 return os.utime(self.join(path), t)
426
425
427 def walk(self, path=None, onerror=None):
426 def walk(self, path=None, onerror=None):
428 """Yield (dirpath, dirs, files) tuple for each directories under path
427 """Yield (dirpath, dirs, files) tuple for each directories under path
429
428
430 ``dirpath`` is relative one from the root of this vfs. This
429 ``dirpath`` is relative one from the root of this vfs. This
431 uses ``os.sep`` as path separator, even you specify POSIX
430 uses ``os.sep`` as path separator, even you specify POSIX
432 style ``path``.
431 style ``path``.
433
432
434 "The root of this vfs" is represented as empty ``dirpath``.
433 "The root of this vfs" is represented as empty ``dirpath``.
435 """
434 """
436 root = os.path.normpath(self.join(None))
435 root = os.path.normpath(self.join(None))
437 # when dirpath == root, dirpath[prefixlen:] becomes empty
436 # when dirpath == root, dirpath[prefixlen:] becomes empty
438 # because len(dirpath) < prefixlen.
437 # because len(dirpath) < prefixlen.
439 prefixlen = len(pathutil.normasprefix(root))
438 prefixlen = len(pathutil.normasprefix(root))
440 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
439 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
441 yield (dirpath[prefixlen:], dirs, files)
440 yield (dirpath[prefixlen:], dirs, files)
442
441
443 @contextlib.contextmanager
442 @contextlib.contextmanager
444 def backgroundclosing(self, ui, expectedcount=-1):
443 def backgroundclosing(self, ui, expectedcount=-1):
445 """Allow files to be closed asynchronously.
444 """Allow files to be closed asynchronously.
446
445
447 When this context manager is active, ``backgroundclose`` can be passed
446 When this context manager is active, ``backgroundclose`` can be passed
448 to ``__call__``/``open`` to result in the file possibly being closed
447 to ``__call__``/``open`` to result in the file possibly being closed
449 asynchronously, on a background thread.
448 asynchronously, on a background thread.
450 """
449 """
451 # This is an arbitrary restriction and could be changed if we ever
450 # This is an arbitrary restriction and could be changed if we ever
452 # have a use case.
451 # have a use case.
453 vfs = getattr(self, 'vfs', self)
452 vfs = getattr(self, 'vfs', self)
454 if getattr(vfs, '_backgroundfilecloser', None):
453 if getattr(vfs, '_backgroundfilecloser', None):
455 raise error.Abort('can only have 1 active background file closer')
454 raise error.Abort('can only have 1 active background file closer')
456
455
457 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
456 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
458 try:
457 try:
459 vfs._backgroundfilecloser = bfc
458 vfs._backgroundfilecloser = bfc
460 yield bfc
459 yield bfc
461 finally:
460 finally:
462 vfs._backgroundfilecloser = None
461 vfs._backgroundfilecloser = None
463
462
464 class vfs(abstractvfs):
463 class vfs(abstractvfs):
465 '''Operate files relative to a base directory
464 '''Operate files relative to a base directory
466
465
467 This class is used to hide the details of COW semantics and
466 This class is used to hide the details of COW semantics and
468 remote file access from higher level code.
467 remote file access from higher level code.
469 '''
468 '''
470 def __init__(self, base, audit=True, expandpath=False, realpath=False):
469 def __init__(self, base, audit=True, expandpath=False, realpath=False):
471 if expandpath:
470 if expandpath:
472 base = util.expandpath(base)
471 base = util.expandpath(base)
473 if realpath:
472 if realpath:
474 base = os.path.realpath(base)
473 base = os.path.realpath(base)
475 self.base = base
474 self.base = base
476 self.mustaudit = audit
475 self.mustaudit = audit
477 self.createmode = None
476 self.createmode = None
478 self._trustnlink = None
477 self._trustnlink = None
479
478
480 @property
479 @property
481 def mustaudit(self):
480 def mustaudit(self):
482 return self._audit
481 return self._audit
483
482
484 @mustaudit.setter
483 @mustaudit.setter
485 def mustaudit(self, onoff):
484 def mustaudit(self, onoff):
486 self._audit = onoff
485 self._audit = onoff
487 if onoff:
486 if onoff:
488 self.audit = pathutil.pathauditor(self.base)
487 self.audit = pathutil.pathauditor(self.base)
489 else:
488 else:
490 self.audit = util.always
489 self.audit = util.always
491
490
492 @util.propertycache
491 @util.propertycache
493 def _cansymlink(self):
492 def _cansymlink(self):
494 return util.checklink(self.base)
493 return util.checklink(self.base)
495
494
496 @util.propertycache
495 @util.propertycache
497 def _chmod(self):
496 def _chmod(self):
498 return util.checkexec(self.base)
497 return util.checkexec(self.base)
499
498
500 def _fixfilemode(self, name):
499 def _fixfilemode(self, name):
501 if self.createmode is None or not self._chmod:
500 if self.createmode is None or not self._chmod:
502 return
501 return
503 os.chmod(name, self.createmode & 0o666)
502 os.chmod(name, self.createmode & 0o666)
504
503
505 def __call__(self, path, mode="r", text=False, atomictemp=False,
504 def __call__(self, path, mode="r", text=False, atomictemp=False,
506 notindexed=False, backgroundclose=False):
505 notindexed=False, backgroundclose=False):
507 '''Open ``path`` file, which is relative to vfs root.
506 '''Open ``path`` file, which is relative to vfs root.
508
507
509 Newly created directories are marked as "not to be indexed by
508 Newly created directories are marked as "not to be indexed by
510 the content indexing service", if ``notindexed`` is specified
509 the content indexing service", if ``notindexed`` is specified
511 for "write" mode access.
510 for "write" mode access.
512
511
513 If ``backgroundclose`` is passed, the file may be closed asynchronously.
512 If ``backgroundclose`` is passed, the file may be closed asynchronously.
514 It can only be used if the ``self.backgroundclosing()`` context manager
513 It can only be used if the ``self.backgroundclosing()`` context manager
515 is active. This should only be specified if the following criteria hold:
514 is active. This should only be specified if the following criteria hold:
516
515
517 1. There is a potential for writing thousands of files. Unless you
516 1. There is a potential for writing thousands of files. Unless you
518 are writing thousands of files, the performance benefits of
517 are writing thousands of files, the performance benefits of
519 asynchronously closing files is not realized.
518 asynchronously closing files is not realized.
520 2. Files are opened exactly once for the ``backgroundclosing``
519 2. Files are opened exactly once for the ``backgroundclosing``
521 active duration and are therefore free of race conditions between
520 active duration and are therefore free of race conditions between
522 closing a file on a background thread and reopening it. (If the
521 closing a file on a background thread and reopening it. (If the
523 file were opened multiple times, there could be unflushed data
522 file were opened multiple times, there could be unflushed data
524 because the original file handle hasn't been flushed/closed yet.)
523 because the original file handle hasn't been flushed/closed yet.)
525 '''
524 '''
526 if self._audit:
525 if self._audit:
527 r = util.checkosfilename(path)
526 r = util.checkosfilename(path)
528 if r:
527 if r:
529 raise error.Abort("%s: %r" % (r, path))
528 raise error.Abort("%s: %r" % (r, path))
530 self.audit(path)
529 self.audit(path)
531 f = self.join(path)
530 f = self.join(path)
532
531
533 if not text and "b" not in mode:
532 if not text and "b" not in mode:
534 mode += "b" # for that other OS
533 mode += "b" # for that other OS
535
534
536 nlink = -1
535 nlink = -1
537 if mode not in ('r', 'rb'):
536 if mode not in ('r', 'rb'):
538 dirname, basename = util.split(f)
537 dirname, basename = util.split(f)
539 # If basename is empty, then the path is malformed because it points
538 # If basename is empty, then the path is malformed because it points
540 # to a directory. Let the posixfile() call below raise IOError.
539 # to a directory. Let the posixfile() call below raise IOError.
541 if basename:
540 if basename:
542 if atomictemp:
541 if atomictemp:
543 util.ensuredirs(dirname, self.createmode, notindexed)
542 util.ensuredirs(dirname, self.createmode, notindexed)
544 return util.atomictempfile(f, mode, self.createmode)
543 return util.atomictempfile(f, mode, self.createmode)
545 try:
544 try:
546 if 'w' in mode:
545 if 'w' in mode:
547 util.unlink(f)
546 util.unlink(f)
548 nlink = 0
547 nlink = 0
549 else:
548 else:
550 # nlinks() may behave differently for files on Windows
549 # nlinks() may behave differently for files on Windows
551 # shares if the file is open.
550 # shares if the file is open.
552 with util.posixfile(f):
551 with util.posixfile(f):
553 nlink = util.nlinks(f)
552 nlink = util.nlinks(f)
554 if nlink < 1:
553 if nlink < 1:
555 nlink = 2 # force mktempcopy (issue1922)
554 nlink = 2 # force mktempcopy (issue1922)
556 except (OSError, IOError) as e:
555 except (OSError, IOError) as e:
557 if e.errno != errno.ENOENT:
556 if e.errno != errno.ENOENT:
558 raise
557 raise
559 nlink = 0
558 nlink = 0
560 util.ensuredirs(dirname, self.createmode, notindexed)
559 util.ensuredirs(dirname, self.createmode, notindexed)
561 if nlink > 0:
560 if nlink > 0:
562 if self._trustnlink is None:
561 if self._trustnlink is None:
563 self._trustnlink = nlink > 1 or util.checknlink(f)
562 self._trustnlink = nlink > 1 or util.checknlink(f)
564 if nlink > 1 or not self._trustnlink:
563 if nlink > 1 or not self._trustnlink:
565 util.rename(util.mktempcopy(f), f)
564 util.rename(util.mktempcopy(f), f)
566 fp = util.posixfile(f, mode)
565 fp = util.posixfile(f, mode)
567 if nlink == 0:
566 if nlink == 0:
568 self._fixfilemode(f)
567 self._fixfilemode(f)
569
568
570 if backgroundclose:
569 if backgroundclose:
571 if not self._backgroundfilecloser:
570 if not self._backgroundfilecloser:
572 raise error.Abort('backgroundclose can only be used when a '
571 raise error.Abort('backgroundclose can only be used when a '
573 'backgroundclosing context manager is active')
572 'backgroundclosing context manager is active')
574
573
575 fp = delayclosedfile(fp, self._backgroundfilecloser)
574 fp = delayclosedfile(fp, self._backgroundfilecloser)
576
575
577 return fp
576 return fp
578
577
579 def symlink(self, src, dst):
578 def symlink(self, src, dst):
580 self.audit(dst)
579 self.audit(dst)
581 linkname = self.join(dst)
580 linkname = self.join(dst)
582 try:
581 try:
583 os.unlink(linkname)
582 os.unlink(linkname)
584 except OSError:
583 except OSError:
585 pass
584 pass
586
585
587 util.ensuredirs(os.path.dirname(linkname), self.createmode)
586 util.ensuredirs(os.path.dirname(linkname), self.createmode)
588
587
589 if self._cansymlink:
588 if self._cansymlink:
590 try:
589 try:
591 os.symlink(src, linkname)
590 os.symlink(src, linkname)
592 except OSError as err:
591 except OSError as err:
593 raise OSError(err.errno, _('could not symlink to %r: %s') %
592 raise OSError(err.errno, _('could not symlink to %r: %s') %
594 (src, err.strerror), linkname)
593 (src, err.strerror), linkname)
595 else:
594 else:
596 self.write(dst, src)
595 self.write(dst, src)
597
596
598 def join(self, path, *insidef):
597 def join(self, path, *insidef):
599 if path:
598 if path:
600 return os.path.join(self.base, path, *insidef)
599 return os.path.join(self.base, path, *insidef)
601 else:
600 else:
602 return self.base
601 return self.base
603
602
604 opener = vfs
603 opener = vfs
605
604
606 class auditvfs(object):
605 class auditvfs(object):
607 def __init__(self, vfs):
606 def __init__(self, vfs):
608 self.vfs = vfs
607 self.vfs = vfs
609
608
610 @property
609 @property
611 def mustaudit(self):
610 def mustaudit(self):
612 return self.vfs.mustaudit
611 return self.vfs.mustaudit
613
612
614 @mustaudit.setter
613 @mustaudit.setter
615 def mustaudit(self, onoff):
614 def mustaudit(self, onoff):
616 self.vfs.mustaudit = onoff
615 self.vfs.mustaudit = onoff
617
616
618 class filtervfs(abstractvfs, auditvfs):
617 class filtervfs(abstractvfs, auditvfs):
619 '''Wrapper vfs for filtering filenames with a function.'''
618 '''Wrapper vfs for filtering filenames with a function.'''
620
619
621 def __init__(self, vfs, filter):
620 def __init__(self, vfs, filter):
622 auditvfs.__init__(self, vfs)
621 auditvfs.__init__(self, vfs)
623 self._filter = filter
622 self._filter = filter
624
623
625 def __call__(self, path, *args, **kwargs):
624 def __call__(self, path, *args, **kwargs):
626 return self.vfs(self._filter(path), *args, **kwargs)
625 return self.vfs(self._filter(path), *args, **kwargs)
627
626
628 def join(self, path, *insidef):
627 def join(self, path, *insidef):
629 if path:
628 if path:
630 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
629 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
631 else:
630 else:
632 return self.vfs.join(path)
631 return self.vfs.join(path)
633
632
634 filteropener = filtervfs
633 filteropener = filtervfs
635
634
636 class readonlyvfs(abstractvfs, auditvfs):
635 class readonlyvfs(abstractvfs, auditvfs):
637 '''Wrapper vfs preventing any writing.'''
636 '''Wrapper vfs preventing any writing.'''
638
637
639 def __init__(self, vfs):
638 def __init__(self, vfs):
640 auditvfs.__init__(self, vfs)
639 auditvfs.__init__(self, vfs)
641
640
642 def __call__(self, path, mode='r', *args, **kw):
641 def __call__(self, path, mode='r', *args, **kw):
643 if mode not in ('r', 'rb'):
642 if mode not in ('r', 'rb'):
644 raise error.Abort('this vfs is read only')
643 raise error.Abort('this vfs is read only')
645 return self.vfs(path, mode, *args, **kw)
644 return self.vfs(path, mode, *args, **kw)
646
645
647 def join(self, path, *insidef):
646 def join(self, path, *insidef):
648 return self.vfs.join(path, *insidef)
647 return self.vfs.join(path, *insidef)
649
648
650 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
649 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
651 '''yield every hg repository under path, always recursively.
650 '''yield every hg repository under path, always recursively.
652 The recurse flag will only control recursion into repo working dirs'''
651 The recurse flag will only control recursion into repo working dirs'''
653 def errhandler(err):
652 def errhandler(err):
654 if err.filename == path:
653 if err.filename == path:
655 raise err
654 raise err
656 samestat = getattr(os.path, 'samestat', None)
655 samestat = getattr(os.path, 'samestat', None)
657 if followsym and samestat is not None:
656 if followsym and samestat is not None:
658 def adddir(dirlst, dirname):
657 def adddir(dirlst, dirname):
659 match = False
658 match = False
660 dirstat = os.stat(dirname)
659 dirstat = os.stat(dirname)
661 for lstdirstat in dirlst:
660 for lstdirstat in dirlst:
662 if samestat(dirstat, lstdirstat):
661 if samestat(dirstat, lstdirstat):
663 match = True
662 match = True
664 break
663 break
665 if not match:
664 if not match:
666 dirlst.append(dirstat)
665 dirlst.append(dirstat)
667 return not match
666 return not match
668 else:
667 else:
669 followsym = False
668 followsym = False
670
669
671 if (seen_dirs is None) and followsym:
670 if (seen_dirs is None) and followsym:
672 seen_dirs = []
671 seen_dirs = []
673 adddir(seen_dirs, path)
672 adddir(seen_dirs, path)
674 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
673 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
675 dirs.sort()
674 dirs.sort()
676 if '.hg' in dirs:
675 if '.hg' in dirs:
677 yield root # found a repository
676 yield root # found a repository
678 qroot = os.path.join(root, '.hg', 'patches')
677 qroot = os.path.join(root, '.hg', 'patches')
679 if os.path.isdir(os.path.join(qroot, '.hg')):
678 if os.path.isdir(os.path.join(qroot, '.hg')):
680 yield qroot # we have a patch queue repo here
679 yield qroot # we have a patch queue repo here
681 if recurse:
680 if recurse:
682 # avoid recursing inside the .hg directory
681 # avoid recursing inside the .hg directory
683 dirs.remove('.hg')
682 dirs.remove('.hg')
684 else:
683 else:
685 dirs[:] = [] # don't descend further
684 dirs[:] = [] # don't descend further
686 elif followsym:
685 elif followsym:
687 newdirs = []
686 newdirs = []
688 for d in dirs:
687 for d in dirs:
689 fname = os.path.join(root, d)
688 fname = os.path.join(root, d)
690 if adddir(seen_dirs, fname):
689 if adddir(seen_dirs, fname):
691 if os.path.islink(fname):
690 if os.path.islink(fname):
692 for hgname in walkrepos(fname, True, seen_dirs):
691 for hgname in walkrepos(fname, True, seen_dirs):
693 yield hgname
692 yield hgname
694 else:
693 else:
695 newdirs.append(d)
694 newdirs.append(d)
696 dirs[:] = newdirs
695 dirs[:] = newdirs
697
696
698 def osrcpath():
697 def osrcpath():
699 '''return default os-specific hgrc search path'''
698 '''return default os-specific hgrc search path'''
700 path = []
699 path = []
701 defaultpath = os.path.join(util.datapath, 'default.d')
700 defaultpath = os.path.join(util.datapath, 'default.d')
702 if os.path.isdir(defaultpath):
701 if os.path.isdir(defaultpath):
703 for f, kind in osutil.listdir(defaultpath):
702 for f, kind in osutil.listdir(defaultpath):
704 if f.endswith('.rc'):
703 if f.endswith('.rc'):
705 path.append(os.path.join(defaultpath, f))
704 path.append(os.path.join(defaultpath, f))
706 path.extend(systemrcpath())
705 path.extend(systemrcpath())
707 path.extend(userrcpath())
706 path.extend(userrcpath())
708 path = [os.path.normpath(f) for f in path]
707 path = [os.path.normpath(f) for f in path]
709 return path
708 return path
710
709
711 _rcpath = None
710 _rcpath = None
712
711
713 def rcpath():
712 def rcpath():
714 '''return hgrc search path. if env var HGRCPATH is set, use it.
713 '''return hgrc search path. if env var HGRCPATH is set, use it.
715 for each item in path, if directory, use files ending in .rc,
714 for each item in path, if directory, use files ending in .rc,
716 else use item.
715 else use item.
717 make HGRCPATH empty to only look in .hg/hgrc of current repo.
716 make HGRCPATH empty to only look in .hg/hgrc of current repo.
718 if no HGRCPATH, use default os-specific path.'''
717 if no HGRCPATH, use default os-specific path.'''
719 global _rcpath
718 global _rcpath
720 if _rcpath is None:
719 if _rcpath is None:
721 if 'HGRCPATH' in os.environ:
720 if 'HGRCPATH' in os.environ:
722 _rcpath = []
721 _rcpath = []
723 for p in os.environ['HGRCPATH'].split(os.pathsep):
722 for p in os.environ['HGRCPATH'].split(os.pathsep):
724 if not p:
723 if not p:
725 continue
724 continue
726 p = util.expandpath(p)
725 p = util.expandpath(p)
727 if os.path.isdir(p):
726 if os.path.isdir(p):
728 for f, kind in osutil.listdir(p):
727 for f, kind in osutil.listdir(p):
729 if f.endswith('.rc'):
728 if f.endswith('.rc'):
730 _rcpath.append(os.path.join(p, f))
729 _rcpath.append(os.path.join(p, f))
731 else:
730 else:
732 _rcpath.append(p)
731 _rcpath.append(p)
733 else:
732 else:
734 _rcpath = osrcpath()
733 _rcpath = osrcpath()
735 return _rcpath
734 return _rcpath
736
735
737 def intrev(rev):
736 def intrev(rev):
738 """Return integer for a given revision that can be used in comparison or
737 """Return integer for a given revision that can be used in comparison or
739 arithmetic operation"""
738 arithmetic operation"""
740 if rev is None:
739 if rev is None:
741 return wdirrev
740 return wdirrev
742 return rev
741 return rev
743
742
744 def revsingle(repo, revspec, default='.'):
743 def revsingle(repo, revspec, default='.'):
745 if not revspec and revspec != 0:
744 if not revspec and revspec != 0:
746 return repo[default]
745 return repo[default]
747
746
748 l = revrange(repo, [revspec])
747 l = revrange(repo, [revspec])
749 if not l:
748 if not l:
750 raise error.Abort(_('empty revision set'))
749 raise error.Abort(_('empty revision set'))
751 return repo[l.last()]
750 return repo[l.last()]
752
751
753 def _pairspec(revspec):
752 def _pairspec(revspec):
754 tree = revset.parse(revspec)
753 tree = revset.parse(revspec)
755 tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y"
754 tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y"
756 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
755 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
757
756
758 def revpair(repo, revs):
757 def revpair(repo, revs):
759 if not revs:
758 if not revs:
760 return repo.dirstate.p1(), None
759 return repo.dirstate.p1(), None
761
760
762 l = revrange(repo, revs)
761 l = revrange(repo, revs)
763
762
764 if not l:
763 if not l:
765 first = second = None
764 first = second = None
766 elif l.isascending():
765 elif l.isascending():
767 first = l.min()
766 first = l.min()
768 second = l.max()
767 second = l.max()
769 elif l.isdescending():
768 elif l.isdescending():
770 first = l.max()
769 first = l.max()
771 second = l.min()
770 second = l.min()
772 else:
771 else:
773 first = l.first()
772 first = l.first()
774 second = l.last()
773 second = l.last()
775
774
776 if first is None:
775 if first is None:
777 raise error.Abort(_('empty revision range'))
776 raise error.Abort(_('empty revision range'))
778 if (first == second and len(revs) >= 2
777 if (first == second and len(revs) >= 2
779 and not all(revrange(repo, [r]) for r in revs)):
778 and not all(revrange(repo, [r]) for r in revs)):
780 raise error.Abort(_('empty revision on one side of range'))
779 raise error.Abort(_('empty revision on one side of range'))
781
780
782 # if top-level is range expression, the result must always be a pair
781 # if top-level is range expression, the result must always be a pair
783 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
782 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
784 return repo.lookup(first), None
783 return repo.lookup(first), None
785
784
786 return repo.lookup(first), repo.lookup(second)
785 return repo.lookup(first), repo.lookup(second)
787
786
788 def revrange(repo, revs):
787 def revrange(repo, revs):
789 """Yield revision as strings from a list of revision specifications."""
788 """Yield revision as strings from a list of revision specifications."""
790 allspecs = []
789 allspecs = []
791 for spec in revs:
790 for spec in revs:
792 if isinstance(spec, int):
791 if isinstance(spec, int):
793 spec = revset.formatspec('rev(%d)', spec)
792 spec = revset.formatspec('rev(%d)', spec)
794 allspecs.append(spec)
793 allspecs.append(spec)
795 m = revset.matchany(repo.ui, allspecs, repo)
794 m = revset.matchany(repo.ui, allspecs, repo)
796 return m(repo)
795 return m(repo)
797
796
798 def meaningfulparents(repo, ctx):
797 def meaningfulparents(repo, ctx):
799 """Return list of meaningful (or all if debug) parentrevs for rev.
798 """Return list of meaningful (or all if debug) parentrevs for rev.
800
799
801 For merges (two non-nullrev revisions) both parents are meaningful.
800 For merges (two non-nullrev revisions) both parents are meaningful.
802 Otherwise the first parent revision is considered meaningful if it
801 Otherwise the first parent revision is considered meaningful if it
803 is not the preceding revision.
802 is not the preceding revision.
804 """
803 """
805 parents = ctx.parents()
804 parents = ctx.parents()
806 if len(parents) > 1:
805 if len(parents) > 1:
807 return parents
806 return parents
808 if repo.ui.debugflag:
807 if repo.ui.debugflag:
809 return [parents[0], repo['null']]
808 return [parents[0], repo['null']]
810 if parents[0].rev() >= intrev(ctx.rev()) - 1:
809 if parents[0].rev() >= intrev(ctx.rev()) - 1:
811 return []
810 return []
812 return parents
811 return parents
813
812
814 def expandpats(pats):
813 def expandpats(pats):
815 '''Expand bare globs when running on windows.
814 '''Expand bare globs when running on windows.
816 On posix we assume it already has already been done by sh.'''
815 On posix we assume it already has already been done by sh.'''
817 if not util.expandglobs:
816 if not util.expandglobs:
818 return list(pats)
817 return list(pats)
819 ret = []
818 ret = []
820 for kindpat in pats:
819 for kindpat in pats:
821 kind, pat = matchmod._patsplit(kindpat, None)
820 kind, pat = matchmod._patsplit(kindpat, None)
822 if kind is None:
821 if kind is None:
823 try:
822 try:
824 globbed = glob.glob(pat)
823 globbed = glob.glob(pat)
825 except re.error:
824 except re.error:
826 globbed = [pat]
825 globbed = [pat]
827 if globbed:
826 if globbed:
828 ret.extend(globbed)
827 ret.extend(globbed)
829 continue
828 continue
830 ret.append(kindpat)
829 ret.append(kindpat)
831 return ret
830 return ret
832
831
833 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
832 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
834 badfn=None):
833 badfn=None):
835 '''Return a matcher and the patterns that were used.
834 '''Return a matcher and the patterns that were used.
836 The matcher will warn about bad matches, unless an alternate badfn callback
835 The matcher will warn about bad matches, unless an alternate badfn callback
837 is provided.'''
836 is provided.'''
838 if pats == ("",):
837 if pats == ("",):
839 pats = []
838 pats = []
840 if opts is None:
839 if opts is None:
841 opts = {}
840 opts = {}
842 if not globbed and default == 'relpath':
841 if not globbed and default == 'relpath':
843 pats = expandpats(pats or [])
842 pats = expandpats(pats or [])
844
843
845 def bad(f, msg):
844 def bad(f, msg):
846 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
845 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
847
846
848 if badfn is None:
847 if badfn is None:
849 badfn = bad
848 badfn = bad
850
849
851 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
850 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
852 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
851 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
853
852
854 if m.always():
853 if m.always():
855 pats = []
854 pats = []
856 return m, pats
855 return m, pats
857
856
858 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
857 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
859 badfn=None):
858 badfn=None):
860 '''Return a matcher that will warn about bad matches.'''
859 '''Return a matcher that will warn about bad matches.'''
861 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
860 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
862
861
863 def matchall(repo):
862 def matchall(repo):
864 '''Return a matcher that will efficiently match everything.'''
863 '''Return a matcher that will efficiently match everything.'''
865 return matchmod.always(repo.root, repo.getcwd())
864 return matchmod.always(repo.root, repo.getcwd())
866
865
867 def matchfiles(repo, files, badfn=None):
866 def matchfiles(repo, files, badfn=None):
868 '''Return a matcher that will efficiently match exactly these files.'''
867 '''Return a matcher that will efficiently match exactly these files.'''
869 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
868 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
870
869
871 def origpath(ui, repo, filepath):
870 def origpath(ui, repo, filepath):
872 '''customize where .orig files are created
871 '''customize where .orig files are created
873
872
874 Fetch user defined path from config file: [ui] origbackuppath = <path>
873 Fetch user defined path from config file: [ui] origbackuppath = <path>
875 Fall back to default (filepath) if not specified
874 Fall back to default (filepath) if not specified
876 '''
875 '''
877 origbackuppath = ui.config('ui', 'origbackuppath', None)
876 origbackuppath = ui.config('ui', 'origbackuppath', None)
878 if origbackuppath is None:
877 if origbackuppath is None:
879 return filepath + ".orig"
878 return filepath + ".orig"
880
879
881 filepathfromroot = os.path.relpath(filepath, start=repo.root)
880 filepathfromroot = os.path.relpath(filepath, start=repo.root)
882 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
881 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
883
882
884 origbackupdir = repo.vfs.dirname(fullorigpath)
883 origbackupdir = repo.vfs.dirname(fullorigpath)
885 if not repo.vfs.exists(origbackupdir):
884 if not repo.vfs.exists(origbackupdir):
886 ui.note(_('creating directory: %s\n') % origbackupdir)
885 ui.note(_('creating directory: %s\n') % origbackupdir)
887 util.makedirs(origbackupdir)
886 util.makedirs(origbackupdir)
888
887
889 return fullorigpath + ".orig"
888 return fullorigpath + ".orig"
890
889
891 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
890 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
892 if opts is None:
891 if opts is None:
893 opts = {}
892 opts = {}
894 m = matcher
893 m = matcher
895 if dry_run is None:
894 if dry_run is None:
896 dry_run = opts.get('dry_run')
895 dry_run = opts.get('dry_run')
897 if similarity is None:
896 if similarity is None:
898 similarity = float(opts.get('similarity') or 0)
897 similarity = float(opts.get('similarity') or 0)
899
898
900 ret = 0
899 ret = 0
901 join = lambda f: os.path.join(prefix, f)
900 join = lambda f: os.path.join(prefix, f)
902
901
903 def matchessubrepo(matcher, subpath):
902 def matchessubrepo(matcher, subpath):
904 if matcher.exact(subpath):
903 if matcher.exact(subpath):
905 return True
904 return True
906 for f in matcher.files():
905 for f in matcher.files():
907 if f.startswith(subpath):
906 if f.startswith(subpath):
908 return True
907 return True
909 return False
908 return False
910
909
911 wctx = repo[None]
910 wctx = repo[None]
912 for subpath in sorted(wctx.substate):
911 for subpath in sorted(wctx.substate):
913 if opts.get('subrepos') or matchessubrepo(m, subpath):
912 if opts.get('subrepos') or matchessubrepo(m, subpath):
914 sub = wctx.sub(subpath)
913 sub = wctx.sub(subpath)
915 try:
914 try:
916 submatch = matchmod.subdirmatcher(subpath, m)
915 submatch = matchmod.subdirmatcher(subpath, m)
917 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
916 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
918 ret = 1
917 ret = 1
919 except error.LookupError:
918 except error.LookupError:
920 repo.ui.status(_("skipping missing subrepository: %s\n")
919 repo.ui.status(_("skipping missing subrepository: %s\n")
921 % join(subpath))
920 % join(subpath))
922
921
923 rejected = []
922 rejected = []
924 def badfn(f, msg):
923 def badfn(f, msg):
925 if f in m.files():
924 if f in m.files():
926 m.bad(f, msg)
925 m.bad(f, msg)
927 rejected.append(f)
926 rejected.append(f)
928
927
929 badmatch = matchmod.badmatch(m, badfn)
928 badmatch = matchmod.badmatch(m, badfn)
930 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
929 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
931 badmatch)
930 badmatch)
932
931
933 unknownset = set(unknown + forgotten)
932 unknownset = set(unknown + forgotten)
934 toprint = unknownset.copy()
933 toprint = unknownset.copy()
935 toprint.update(deleted)
934 toprint.update(deleted)
936 for abs in sorted(toprint):
935 for abs in sorted(toprint):
937 if repo.ui.verbose or not m.exact(abs):
936 if repo.ui.verbose or not m.exact(abs):
938 if abs in unknownset:
937 if abs in unknownset:
939 status = _('adding %s\n') % m.uipath(abs)
938 status = _('adding %s\n') % m.uipath(abs)
940 else:
939 else:
941 status = _('removing %s\n') % m.uipath(abs)
940 status = _('removing %s\n') % m.uipath(abs)
942 repo.ui.status(status)
941 repo.ui.status(status)
943
942
944 renames = _findrenames(repo, m, added + unknown, removed + deleted,
943 renames = _findrenames(repo, m, added + unknown, removed + deleted,
945 similarity)
944 similarity)
946
945
947 if not dry_run:
946 if not dry_run:
948 _markchanges(repo, unknown + forgotten, deleted, renames)
947 _markchanges(repo, unknown + forgotten, deleted, renames)
949
948
950 for f in rejected:
949 for f in rejected:
951 if f in m.files():
950 if f in m.files():
952 return 1
951 return 1
953 return ret
952 return ret
954
953
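One subtlety in matchessubrepo above: the bare startswith() test also matches sibling paths such as 'docs2/file' against a subrepo named 'docs'. A stricter variant, shown only as a sketch and not part of this change, would insist on a path-component boundary:

    def matchessubrepo_strict(matcher, subpath):
        # Sketch: accept an exact match or a prefix ending at a '/' boundary.
        if matcher.exact(subpath):
            return True
        return any(f == subpath or f.startswith(subpath + '/')
                   for f in matcher.files())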
955 def marktouched(repo, files, similarity=0.0):
954 def marktouched(repo, files, similarity=0.0):
956 '''Assert that files have somehow been operated upon. files are relative to
955 '''Assert that files have somehow been operated upon. files are relative to
957 the repo root.'''
956 the repo root.'''
958 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
957 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
959 rejected = []
958 rejected = []
960
959
961 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
960 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
962
961
963 if repo.ui.verbose:
962 if repo.ui.verbose:
964 unknownset = set(unknown + forgotten)
963 unknownset = set(unknown + forgotten)
965 toprint = unknownset.copy()
964 toprint = unknownset.copy()
966 toprint.update(deleted)
965 toprint.update(deleted)
967 for abs in sorted(toprint):
966 for abs in sorted(toprint):
968 if abs in unknownset:
967 if abs in unknownset:
969 status = _('adding %s\n') % abs
968 status = _('adding %s\n') % abs
970 else:
969 else:
971 status = _('removing %s\n') % abs
970 status = _('removing %s\n') % abs
972 repo.ui.status(status)
971 repo.ui.status(status)
973
972
974 renames = _findrenames(repo, m, added + unknown, removed + deleted,
973 renames = _findrenames(repo, m, added + unknown, removed + deleted,
975 similarity)
974 similarity)
976
975
977 _markchanges(repo, unknown + forgotten, deleted, renames)
976 _markchanges(repo, unknown + forgotten, deleted, renames)
978
977
979 for f in rejected:
978 for f in rejected:
980 if f in m.files():
979 if f in m.files():
981 return 1
980 return 1
982 return 0
981 return 0
983
982
984 def _interestingfiles(repo, matcher):
983 def _interestingfiles(repo, matcher):
985 '''Walk dirstate with matcher, looking for files that addremove would care
984 '''Walk dirstate with matcher, looking for files that addremove would care
986 about.
985 about.
987
986
988 This is different from dirstate.status because it doesn't care about
987 This is different from dirstate.status because it doesn't care about
989 whether files are modified or clean.'''
988 whether files are modified or clean.'''
990 added, unknown, deleted, removed, forgotten = [], [], [], [], []
989 added, unknown, deleted, removed, forgotten = [], [], [], [], []
991 audit_path = pathutil.pathauditor(repo.root)
990 audit_path = pathutil.pathauditor(repo.root)
992
991
993 ctx = repo[None]
992 ctx = repo[None]
994 dirstate = repo.dirstate
993 dirstate = repo.dirstate
995 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
994 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
996 full=False)
995 full=False)
997 for abs, st in walkresults.iteritems():
996 for abs, st in walkresults.iteritems():
998 dstate = dirstate[abs]
997 dstate = dirstate[abs]
999 if dstate == '?' and audit_path.check(abs):
998 if dstate == '?' and audit_path.check(abs):
1000 unknown.append(abs)
999 unknown.append(abs)
1001 elif dstate != 'r' and not st:
1000 elif dstate != 'r' and not st:
1002 deleted.append(abs)
1001 deleted.append(abs)
1003 elif dstate == 'r' and st:
1002 elif dstate == 'r' and st:
1004 forgotten.append(abs)
1003 forgotten.append(abs)
1005 # for finding renames
1004 # for finding renames
1006 elif dstate == 'r' and not st:
1005 elif dstate == 'r' and not st:
1007 removed.append(abs)
1006 removed.append(abs)
1008 elif dstate == 'a':
1007 elif dstate == 'a':
1009 added.append(abs)
1008 added.append(abs)
1010
1009
1011 return added, unknown, deleted, removed, forgotten
1010 return added, unknown, deleted, removed, forgotten
1012
1011
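For reference, the branch ladder above sorts every walked path into one of five buckets from just two inputs: its dirstate character ('?' untracked, 'a' added, 'r' removed, otherwise tracked) and whether the walk returned a stat result for it. The same table as a standalone sketch (the path-audit check on unknown files is omitted):

    def classify(dstate, st):
        # dstate: dirstate character for the path; st: stat result or None.
        if dstate == '?':
            return 'unknown'      # not tracked: candidate for adding
        if dstate != 'r' and not st:
            return 'deleted'      # tracked, but gone from the working copy
        if dstate == 'r' and st:
            return 'forgotten'    # marked removed, yet still present on disk
        if dstate == 'r':
            return 'removed'      # kept only to feed rename detection
        if dstate == 'a':
            return 'added'
        return 'uninteresting'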
1013 def _findrenames(repo, matcher, added, removed, similarity):
1012 def _findrenames(repo, matcher, added, removed, similarity):
1014 '''Find renames from removed files to added ones.'''
1013 '''Find renames from removed files to added ones.'''
1015 renames = {}
1014 renames = {}
1016 if similarity > 0:
1015 if similarity > 0:
1017 for old, new, score in similar.findrenames(repo, added, removed,
1016 for old, new, score in similar.findrenames(repo, added, removed,
1018 similarity):
1017 similarity):
1019 if (repo.ui.verbose or not matcher.exact(old)
1018 if (repo.ui.verbose or not matcher.exact(old)
1020 or not matcher.exact(new)):
1019 or not matcher.exact(new)):
1021 repo.ui.status(_('recording removal of %s as rename to %s '
1020 repo.ui.status(_('recording removal of %s as rename to %s '
1022 '(%d%% similar)\n') %
1021 '(%d%% similar)\n') %
1023 (matcher.rel(old), matcher.rel(new),
1022 (matcher.rel(old), matcher.rel(new),
1024 score * 100))
1023 score * 100))
1025 renames[new] = old
1024 renames[new] = old
1026 return renames
1025 return renames
1027
1026
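Rename detection is driven entirely by the similarity threshold: similar.findrenames scores each (removed, added) pair and yields those at or above the threshold, and the loop above then records new as a copy of old. A rough, self-contained analogy of such scoring (the real implementation hashes and diffs file contents, so difflib here is only an illustration):

    import difflib

    def similarity(old_data, new_data):
        # Ratio of matching content, 0.0 (unrelated) .. 1.0 (identical).
        return difflib.SequenceMatcher(None, old_data, new_data).ratio()

    # 'hg addremove --similarity 90' records a rename for an (added, removed)
    # pair whose score reaches 0.90; lower thresholds accept looser matches.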
1028 def _markchanges(repo, unknown, deleted, renames):
1027 def _markchanges(repo, unknown, deleted, renames):
1029 '''Marks the files in unknown as added, the files in deleted as removed,
1028 '''Marks the files in unknown as added, the files in deleted as removed,
1030 and the files in renames as copied.'''
1029 and the files in renames as copied.'''
1031 wctx = repo[None]
1030 wctx = repo[None]
1032 with repo.wlock():
1031 with repo.wlock():
1033 wctx.forget(deleted)
1032 wctx.forget(deleted)
1034 wctx.add(unknown)
1033 wctx.add(unknown)
1035 for new, old in renames.iteritems():
1034 for new, old in renames.iteritems():
1036 wctx.copy(old, new)
1035 wctx.copy(old, new)
1037
1036
1038 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1037 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1039 """Update the dirstate to reflect the intent of copying src to dst. For
1038 """Update the dirstate to reflect the intent of copying src to dst. For
1040 different reasons it might not end with dst being marked as copied from src.
1039 different reasons it might not end with dst being marked as copied from src.
1041 """
1040 """
1042 origsrc = repo.dirstate.copied(src) or src
1041 origsrc = repo.dirstate.copied(src) or src
1043 if dst == origsrc: # copying back a copy?
1042 if dst == origsrc: # copying back a copy?
1044 if repo.dirstate[dst] not in 'mn' and not dryrun:
1043 if repo.dirstate[dst] not in 'mn' and not dryrun:
1045 repo.dirstate.normallookup(dst)
1044 repo.dirstate.normallookup(dst)
1046 else:
1045 else:
1047 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1046 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1048 if not ui.quiet:
1047 if not ui.quiet:
1049 ui.warn(_("%s has not been committed yet, so no copy "
1048 ui.warn(_("%s has not been committed yet, so no copy "
1050 "data will be stored for %s.\n")
1049 "data will be stored for %s.\n")
1051 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1050 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1052 if repo.dirstate[dst] in '?r' and not dryrun:
1051 if repo.dirstate[dst] in '?r' and not dryrun:
1053 wctx.add([dst])
1052 wctx.add([dst])
1054 elif not dryrun:
1053 elif not dryrun:
1055 wctx.copy(origsrc, dst)
1054 wctx.copy(origsrc, dst)
1056
1055
1057 def readrequires(opener, supported):
1056 def readrequires(opener, supported):
1058 '''Reads and parses .hg/requires and checks if all entries found
1057 '''Reads and parses .hg/requires and checks if all entries found
1059 are in the list of supported features.'''
1058 are in the list of supported features.'''
1060 requirements = set(opener.read("requires").splitlines())
1059 requirements = set(opener.read("requires").splitlines())
1061 missings = []
1060 missings = []
1062 for r in requirements:
1061 for r in requirements:
1063 if r not in supported:
1062 if r not in supported:
1064 if not r or not r[0].isalnum():
1063 if not r or not r[0].isalnum():
1065 raise error.RequirementError(_(".hg/requires file is corrupt"))
1064 raise error.RequirementError(_(".hg/requires file is corrupt"))
1066 missings.append(r)
1065 missings.append(r)
1067 missings.sort()
1066 missings.sort()
1068 if missings:
1067 if missings:
1069 raise error.RequirementError(
1068 raise error.RequirementError(
1070 _("repository requires features unknown to this Mercurial: %s")
1069 _("repository requires features unknown to this Mercurial: %s")
1071 % " ".join(missings),
1070 % " ".join(missings),
1072 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1071 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1073 " for more information"))
1072 " for more information"))
1074 return requirements
1073 return requirements
1075
1074
1076 def writerequires(opener, requirements):
1075 def writerequires(opener, requirements):
1077 with opener('requires', 'w') as fp:
1076 with opener('requires', 'w') as fp:
1078 for r in sorted(requirements):
1077 for r in sorted(requirements):
1079 fp.write("%s\n" % r)
1078 fp.write("%s\n" % r)
1080
1079
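The requires file is one feature name per line; any entry this Mercurial does not recognise aborts opening the repository, so older code never misreads data written by newer formats. A self-contained sketch of the same validation (the supported set and ValueError are illustrative stand-ins, not the real opener API or error classes):

    def checkrequires(text, supported):
        requirements = set(text.splitlines())
        missing = []
        for r in requirements:
            if r in supported:
                continue
            if not r or not r[0].isalnum():
                raise ValueError(".hg/requires file is corrupt")
            missing.append(r)
        if missing:
            raise ValueError("repository requires features unknown to this "
                             "Mercurial: %s" % " ".join(sorted(missing)))
        return requirements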
1081 class filecachesubentry(object):
1080 class filecachesubentry(object):
1082 def __init__(self, path, stat):
1081 def __init__(self, path, stat):
1083 self.path = path
1082 self.path = path
1084 self.cachestat = None
1083 self.cachestat = None
1085 self._cacheable = None
1084 self._cacheable = None
1086
1085
1087 if stat:
1086 if stat:
1088 self.cachestat = filecachesubentry.stat(self.path)
1087 self.cachestat = filecachesubentry.stat(self.path)
1089
1088
1090 if self.cachestat:
1089 if self.cachestat:
1091 self._cacheable = self.cachestat.cacheable()
1090 self._cacheable = self.cachestat.cacheable()
1092 else:
1091 else:
1093 # None means we don't know yet
1092 # None means we don't know yet
1094 self._cacheable = None
1093 self._cacheable = None
1095
1094
1096 def refresh(self):
1095 def refresh(self):
1097 if self.cacheable():
1096 if self.cacheable():
1098 self.cachestat = filecachesubentry.stat(self.path)
1097 self.cachestat = filecachesubentry.stat(self.path)
1099
1098
1100 def cacheable(self):
1099 def cacheable(self):
1101 if self._cacheable is not None:
1100 if self._cacheable is not None:
1102 return self._cacheable
1101 return self._cacheable
1103
1102
1104 # we don't know yet, assume it is for now
1103 # we don't know yet, assume it is for now
1105 return True
1104 return True
1106
1105
1107 def changed(self):
1106 def changed(self):
1108 # no point in going further if we can't cache it
1107 # no point in going further if we can't cache it
1109 if not self.cacheable():
1108 if not self.cacheable():
1110 return True
1109 return True
1111
1110
1112 newstat = filecachesubentry.stat(self.path)
1111 newstat = filecachesubentry.stat(self.path)
1113
1112
1114 # we may not know if it's cacheable yet, check again now
1113 # we may not know if it's cacheable yet, check again now
1115 if newstat and self._cacheable is None:
1114 if newstat and self._cacheable is None:
1116 self._cacheable = newstat.cacheable()
1115 self._cacheable = newstat.cacheable()
1117
1116
1118 # check again
1117 # check again
1119 if not self._cacheable:
1118 if not self._cacheable:
1120 return True
1119 return True
1121
1120
1122 if self.cachestat != newstat:
1121 if self.cachestat != newstat:
1123 self.cachestat = newstat
1122 self.cachestat = newstat
1124 return True
1123 return True
1125 else:
1124 else:
1126 return False
1125 return False
1127
1126
1128 @staticmethod
1127 @staticmethod
1129 def stat(path):
1128 def stat(path):
1130 try:
1129 try:
1131 return util.cachestat(path)
1130 return util.cachestat(path)
1132 except OSError as e:
1131 except OSError as e:
1133 if e.errno != errno.ENOENT:
1132 if e.errno != errno.ENOENT:
1134 raise
1133 raise
1135
1134
1136 class filecacheentry(object):
1135 class filecacheentry(object):
1137 def __init__(self, paths, stat=True):
1136 def __init__(self, paths, stat=True):
1138 self._entries = []
1137 self._entries = []
1139 for path in paths:
1138 for path in paths:
1140 self._entries.append(filecachesubentry(path, stat))
1139 self._entries.append(filecachesubentry(path, stat))
1141
1140
1142 def changed(self):
1141 def changed(self):
1143 '''true if any entry has changed'''
1142 '''true if any entry has changed'''
1144 for entry in self._entries:
1143 for entry in self._entries:
1145 if entry.changed():
1144 if entry.changed():
1146 return True
1145 return True
1147 return False
1146 return False
1148
1147
1149 def refresh(self):
1148 def refresh(self):
1150 for entry in self._entries:
1149 for entry in self._entries:
1151 entry.refresh()
1150 entry.refresh()
1152
1151
1153 class filecache(object):
1152 class filecache(object):
1154 '''A property-like decorator that tracks files under .hg/ for updates.
1153 '''A property-like decorator that tracks files under .hg/ for updates.
1155
1154
1156 Records stat info when called in _filecache.
1155 Records stat info when called in _filecache.
1157
1156
1158 On subsequent calls, compares old stat info with new info, and recreates the
1157 On subsequent calls, compares old stat info with new info, and recreates the
1159 object when any of the files changes, updating the new stat info in
1158 object when any of the files changes, updating the new stat info in
1160 _filecache.
1159 _filecache.
1161
1160
1162 Mercurial uses either atomic renames or appends for files under .hg,
1161 Mercurial uses either atomic renames or appends for files under .hg,
1163 so to ensure the cache is reliable we need the filesystem to be able
1162 so to ensure the cache is reliable we need the filesystem to be able
1164 to tell us if a file has been replaced. If it can't, we fall back to
1163 to tell us if a file has been replaced. If it can't, we fall back to
1165 recreating the object on every call (essentially the same behavior as
1164 recreating the object on every call (essentially the same behavior as
1166 propertycache).
1165 propertycache).
1167
1166
1168 '''
1167 '''
1169 def __init__(self, *paths):
1168 def __init__(self, *paths):
1170 self.paths = paths
1169 self.paths = paths
1171
1170
1172 def join(self, obj, fname):
1171 def join(self, obj, fname):
1173 """Used to compute the runtime path of a cached file.
1172 """Used to compute the runtime path of a cached file.
1174
1173
1175 Users should subclass filecache and provide their own version of this
1174 Users should subclass filecache and provide their own version of this
1176 function to call the appropriate join function on 'obj' (an instance
1175 function to call the appropriate join function on 'obj' (an instance
1177 of the class that its member function was decorated).
1176 of the class that its member function was decorated).
1178 """
1177 """
1179 return obj.join(fname)
1178 return obj.join(fname)
1180
1179
1181 def __call__(self, func):
1180 def __call__(self, func):
1182 self.func = func
1181 self.func = func
1183 self.name = func.__name__
1182 self.name = func.__name__
1184 return self
1183 return self
1185
1184
1186 def __get__(self, obj, type=None):
1185 def __get__(self, obj, type=None):
1187 # do we need to check if the file changed?
1186 # do we need to check if the file changed?
1188 if self.name in obj.__dict__:
1187 if self.name in obj.__dict__:
1189 assert self.name in obj._filecache, self.name
1188 assert self.name in obj._filecache, self.name
1190 return obj.__dict__[self.name]
1189 return obj.__dict__[self.name]
1191
1190
1192 entry = obj._filecache.get(self.name)
1191 entry = obj._filecache.get(self.name)
1193
1192
1194 if entry:
1193 if entry:
1195 if entry.changed():
1194 if entry.changed():
1196 entry.obj = self.func(obj)
1195 entry.obj = self.func(obj)
1197 else:
1196 else:
1198 paths = [self.join(obj, path) for path in self.paths]
1197 paths = [self.join(obj, path) for path in self.paths]
1199
1198
1200 # We stat -before- creating the object so our cache doesn't lie if
1199 # We stat -before- creating the object so our cache doesn't lie if
1201 # a writer modified it between the time we read and stat
1200 # a writer modified it between the time we read and stat
1202 entry = filecacheentry(paths, True)
1201 entry = filecacheentry(paths, True)
1203 entry.obj = self.func(obj)
1202 entry.obj = self.func(obj)
1204
1203
1205 obj._filecache[self.name] = entry
1204 obj._filecache[self.name] = entry
1206
1205
1207 obj.__dict__[self.name] = entry.obj
1206 obj.__dict__[self.name] = entry.obj
1208 return entry.obj
1207 return entry.obj
1209
1208
1210 def __set__(self, obj, value):
1209 def __set__(self, obj, value):
1211 if self.name not in obj._filecache:
1210 if self.name not in obj._filecache:
1212 # we add an entry for the missing value because X in __dict__
1211 # we add an entry for the missing value because X in __dict__
1213 # implies X in _filecache
1212 # implies X in _filecache
1214 paths = [self.join(obj, path) for path in self.paths]
1213 paths = [self.join(obj, path) for path in self.paths]
1215 ce = filecacheentry(paths, False)
1214 ce = filecacheentry(paths, False)
1216 obj._filecache[self.name] = ce
1215 obj._filecache[self.name] = ce
1217 else:
1216 else:
1218 ce = obj._filecache[self.name]
1217 ce = obj._filecache[self.name]
1219
1218
1220 ce.obj = value # update cached copy
1219 ce.obj = value # update cached copy
1221 obj.__dict__[self.name] = value # update copy returned by obj.x
1220 obj.__dict__[self.name] = value # update copy returned by obj.x
1222
1221
1223 def __delete__(self, obj):
1222 def __delete__(self, obj):
1224 try:
1223 try:
1225 del obj.__dict__[self.name]
1224 del obj.__dict__[self.name]
1226 except KeyError:
1225 except KeyError:
1227 raise AttributeError(self.name)
1226 raise AttributeError(self.name)
1228
1227
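In effect the descriptor says "cache this value until the fingerprint of the backing file changes". A stripped-down, self-contained illustration of that idea, using a plain os.stat tuple instead of util.cachestat and no descriptor protocol:

    import os

    class statcached(object):
        """Rebuild a value only when the backing file's fingerprint changes."""
        def __init__(self, path, build):
            self.path = path
            self.build = build
            self._stamp = None
            self._value = None

        def get(self):
            st = os.stat(self.path)
            stamp = (st.st_mtime, st.st_size, st.st_ino)
            if stamp != self._stamp:
                # record the stat first, then rebuild, so a write that races
                # with the rebuild forces another rebuild on the next access
                self._stamp = stamp
                self._value = self.build(self.path)
            return self._value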
1229 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1228 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1230 if lock is None:
1229 if lock is None:
1231 raise error.LockInheritanceContractViolation(
1230 raise error.LockInheritanceContractViolation(
1232 'lock can only be inherited while held')
1231 'lock can only be inherited while held')
1233 if environ is None:
1232 if environ is None:
1234 environ = {}
1233 environ = {}
1235 with lock.inherit() as locker:
1234 with lock.inherit() as locker:
1236 environ[envvar] = locker
1235 environ[envvar] = locker
1237 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1236 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1238
1237
1239 def wlocksub(repo, cmd, *args, **kwargs):
1238 def wlocksub(repo, cmd, *args, **kwargs):
1240 """run cmd as a subprocess that allows inheriting repo's wlock
1239 """run cmd as a subprocess that allows inheriting repo's wlock
1241
1240
1242 This can only be called while the wlock is held. This takes all the
1241 This can only be called while the wlock is held. This takes all the
1243 arguments that ui.system does, and returns the exit code of the
1242 arguments that ui.system does, and returns the exit code of the
1244 subprocess."""
1243 subprocess."""
1245 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1244 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1246 **kwargs)
1245 **kwargs)
1247
1246
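The inheritance handshake is environment-based: while the wlock is held, the locker token from lock.inherit() is exported as HG_WLOCK_LOCKER so a child hg process can see that the already-held lock may be reused rather than deadlocking against its parent. A minimal sketch of the parent side, with subprocess standing in for ui.system:

    import os
    import subprocess

    def run_with_inherited_wlock(cmd, locker):
        # 'locker' is the token yielded by lock.inherit() in the code above.
        env = dict(os.environ)
        env['HG_WLOCK_LOCKER'] = locker
        return subprocess.call(cmd, shell=True, env=env)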
1248 def gdinitconfig(ui):
1247 def gdinitconfig(ui):
1249 """helper function to know if a repo should be created as general delta
1248 """helper function to know if a repo should be created as general delta
1250 """
1249 """
1251 # experimental config: format.generaldelta
1250 # experimental config: format.generaldelta
1252 return (ui.configbool('format', 'generaldelta', False)
1251 return (ui.configbool('format', 'generaldelta', False)
1253 or ui.configbool('format', 'usegeneraldelta', True))
1252 or ui.configbool('format', 'usegeneraldelta', True))
1254
1253
1255 def gddeltaconfig(ui):
1254 def gddeltaconfig(ui):
1256 """helper function to know if incoming delta should be optimised
1255 """helper function to know if incoming delta should be optimised
1257 """
1256 """
1258 # experimental config: format.generaldelta
1257 # experimental config: format.generaldelta
1259 return ui.configbool('format', 'generaldelta', False)
1258 return ui.configbool('format', 'generaldelta', False)
1260
1259
1261 class delayclosedfile(object):
1260 class delayclosedfile(object):
1262 """Proxy for a file object whose close is delayed.
1261 """Proxy for a file object whose close is delayed.
1263
1262
1264 Do not instantiate outside of the vfs layer.
1263 Do not instantiate outside of the vfs layer.
1265 """
1264 """
1266
1265
1267 def __init__(self, fh, closer):
1266 def __init__(self, fh, closer):
1268 object.__setattr__(self, '_origfh', fh)
1267 object.__setattr__(self, '_origfh', fh)
1269 object.__setattr__(self, '_closer', closer)
1268 object.__setattr__(self, '_closer', closer)
1270
1269
1271 def __getattr__(self, attr):
1270 def __getattr__(self, attr):
1272 return getattr(self._origfh, attr)
1271 return getattr(self._origfh, attr)
1273
1272
1274 def __setattr__(self, attr, value):
1273 def __setattr__(self, attr, value):
1275 return setattr(self._origfh, attr, value)
1274 return setattr(self._origfh, attr, value)
1276
1275
1277 def __delattr__(self, attr):
1276 def __delattr__(self, attr):
1278 return delattr(self._origfh, attr)
1277 return delattr(self._origfh, attr)
1279
1278
1280 def __enter__(self):
1279 def __enter__(self):
1281 return self._origfh.__enter__()
1280 return self._origfh.__enter__()
1282
1281
1283 def __exit__(self, exc_type, exc_value, exc_tb):
1282 def __exit__(self, exc_type, exc_value, exc_tb):
1284 self._closer.close(self._origfh)
1283 self._closer.close(self._origfh)
1285
1284
1286 def close(self):
1285 def close(self):
1287 self._closer.close(self._origfh)
1286 self._closer.close(self._origfh)
1288
1287
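The proxy forwards every attribute of the real file object and intercepts only close(), handing the handle to the background closer instead. The same delegation pattern in isolation (a sketch, not the vfs wiring):

    class closeproxy(object):
        def __init__(self, fh, onclose):
            # set directly, mirroring the original's use of object.__setattr__
            object.__setattr__(self, '_fh', fh)
            object.__setattr__(self, '_onclose', onclose)

        def __getattr__(self, name):
            # read/write/seek/... all fall through to the real file object
            return getattr(self._fh, name)

        def close(self):
            # the one method that is rerouted to the deferred closer
            self._onclose(self._fh)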
1289 class backgroundfilecloser(object):
1288 class backgroundfilecloser(object):
1290 """Coordinates background closing of file handles on multiple threads."""
1289 """Coordinates background closing of file handles on multiple threads."""
1291 def __init__(self, ui, expectedcount=-1):
1290 def __init__(self, ui, expectedcount=-1):
1292 self._running = False
1291 self._running = False
1293 self._entered = False
1292 self._entered = False
1294 self._threads = []
1293 self._threads = []
1295 self._threadexception = None
1294 self._threadexception = None
1296
1295
1297 # Only Windows/NTFS has slow file closing. So only enable by default
1296 # Only Windows/NTFS has slow file closing. So only enable by default
1298 # on that platform. But allow it to be enabled elsewhere for testing.
1297 # on that platform. But allow it to be enabled elsewhere for testing.
1299 defaultenabled = os.name == 'nt'
1298 defaultenabled = os.name == 'nt'
1300 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1299 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1301
1300
1302 if not enabled:
1301 if not enabled:
1303 return
1302 return
1304
1303
1305 # There is overhead to starting and stopping the background threads.
1304 # There is overhead to starting and stopping the background threads.
1306 # Don't do background processing unless the file count is large enough
1305 # Don't do background processing unless the file count is large enough
1307 # to justify it.
1306 # to justify it.
1308 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1307 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1309 2048)
1308 2048)
1310 # FUTURE dynamically start background threads after minfilecount closes.
1309 # FUTURE dynamically start background threads after minfilecount closes.
1311 # (We don't currently have any callers that don't know their file count)
1310 # (We don't currently have any callers that don't know their file count)
1312 if expectedcount > 0 and expectedcount < minfilecount:
1311 if expectedcount > 0 and expectedcount < minfilecount:
1313 return
1312 return
1314
1313
1315 # Windows defaults to a limit of 512 open files. A buffer of 128
1314 # Windows defaults to a limit of 512 open files. A buffer of 128
1316 # should give us enough headway.
1315 # should give us enough headway.
1317 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1316 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1318 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1317 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1319
1318
1320 ui.debug('starting %d threads for background file closing\n' %
1319 ui.debug('starting %d threads for background file closing\n' %
1321 threadcount)
1320 threadcount)
1322
1321
1323 self._queue = Queue.Queue(maxsize=maxqueue)
1322 self._queue = util.queue(maxsize=maxqueue)
1324 self._running = True
1323 self._running = True
1325
1324
1326 for i in range(threadcount):
1325 for i in range(threadcount):
1327 t = threading.Thread(target=self._worker, name='backgroundcloser')
1326 t = threading.Thread(target=self._worker, name='backgroundcloser')
1328 self._threads.append(t)
1327 self._threads.append(t)
1329 t.start()
1328 t.start()
1330
1329
1331 def __enter__(self):
1330 def __enter__(self):
1332 self._entered = True
1331 self._entered = True
1333 return self
1332 return self
1334
1333
1335 def __exit__(self, exc_type, exc_value, exc_tb):
1334 def __exit__(self, exc_type, exc_value, exc_tb):
1336 self._running = False
1335 self._running = False
1337
1336
1338 # Wait for threads to finish closing so open files don't linger for
1337 # Wait for threads to finish closing so open files don't linger for
1339 # longer than lifetime of context manager.
1338 # longer than lifetime of context manager.
1340 for t in self._threads:
1339 for t in self._threads:
1341 t.join()
1340 t.join()
1342
1341
1343 def _worker(self):
1342 def _worker(self):
1344 """Main routine for worker thread."""
1343 """Main routine for worker thread."""
1345 while True:
1344 while True:
1346 try:
1345 try:
1347 fh = self._queue.get(block=True, timeout=0.100)
1346 fh = self._queue.get(block=True, timeout=0.100)
1348 # Need to catch errors or the thread will terminate and
1347 # Need to catch errors or the thread will terminate and
1349 # we could orphan file descriptors.
1348 # we could orphan file descriptors.
1350 try:
1349 try:
1351 fh.close()
1350 fh.close()
1352 except Exception as e:
1351 except Exception as e:
1353 # Stash so can re-raise from main thread later.
1352 # Stash so can re-raise from main thread later.
1354 self._threadexception = e
1353 self._threadexception = e
1355 except Queue.Empty:
1354 except util.empty:
1356 if not self._running:
1355 if not self._running:
1357 break
1356 break
1358
1357
1359 def close(self, fh):
1358 def close(self, fh):
1360 """Schedule a file for closing."""
1359 """Schedule a file for closing."""
1361 if not self._entered:
1360 if not self._entered:
1362 raise error.Abort('can only call close() when context manager '
1361 raise error.Abort('can only call close() when context manager '
1363 'active')
1362 'active')
1364
1363
1365 # If a background thread encountered an exception, raise now so we fail
1364 # If a background thread encountered an exception, raise now so we fail
1366 # fast. Otherwise we may potentially go on for minutes until the error
1365 # fast. Otherwise we may potentially go on for minutes until the error
1367 # is acted on.
1366 # is acted on.
1368 if self._threadexception:
1367 if self._threadexception:
1369 e = self._threadexception
1368 e = self._threadexception
1370 self._threadexception = None
1369 self._threadexception = None
1371 raise e
1370 raise e
1372
1371
1373 # If we're not actively running, close synchronously.
1372 # If we're not actively running, close synchronously.
1374 if not self._running:
1373 if not self._running:
1375 fh.close()
1374 fh.close()
1376 return
1375 return
1377
1376
1378 self._queue.put(fh, block=True, timeout=None)
1377 self._queue.put(fh, block=True, timeout=None)
1379
1378
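The substantive change in this file is the pair of substitutions above: Python 3 renamed the Queue module to queue, so scmutil now reaches both the class and the Empty exception through compatibility aliases in mercurial.util rather than importing the module at the top level. Those aliases amount to roughly the following (a sketch of the idea; see util.py for the actual definitions):

    try:
        import Queue as _queuemod      # Python 2 spelling
    except ImportError:
        import queue as _queuemod      # Python 3 spelling

    queue = _queuemod.Queue            # used above as util.queue(maxsize=...)
    empty = _queuemod.Empty            # used above as 'except util.empty:'

The expected test output below shifts accordingly: with the top-level Queue import gone, branchmap.py and changegroup.py no longer fail on the missing Queue module at scmutil.py but on the pre-existing cStringIO import in parsers.py.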
@@ -1,232 +1,232 @@
1 #require test-repo
1 #require test-repo
2
2
3 $ cd "$TESTDIR"/..
3 $ cd "$TESTDIR"/..
4
4
5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 doc/check-seclevel.py not using absolute_import
6 doc/check-seclevel.py not using absolute_import
7 doc/gendoc.py not using absolute_import
7 doc/gendoc.py not using absolute_import
8 doc/hgmanpage.py not using absolute_import
8 doc/hgmanpage.py not using absolute_import
9 hgext/color.py not using absolute_import
9 hgext/color.py not using absolute_import
10 hgext/eol.py not using absolute_import
10 hgext/eol.py not using absolute_import
11 hgext/extdiff.py not using absolute_import
11 hgext/extdiff.py not using absolute_import
12 hgext/factotum.py not using absolute_import
12 hgext/factotum.py not using absolute_import
13 hgext/fetch.py not using absolute_import
13 hgext/fetch.py not using absolute_import
14 hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
14 hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
15 hgext/fsmonitor/pywatchman/__init__.py requires print_function
15 hgext/fsmonitor/pywatchman/__init__.py requires print_function
16 hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
16 hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
17 hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
17 hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
18 hgext/gpg.py not using absolute_import
18 hgext/gpg.py not using absolute_import
19 hgext/graphlog.py not using absolute_import
19 hgext/graphlog.py not using absolute_import
20 hgext/hgcia.py not using absolute_import
20 hgext/hgcia.py not using absolute_import
21 hgext/hgk.py not using absolute_import
21 hgext/hgk.py not using absolute_import
22 hgext/highlight/__init__.py not using absolute_import
22 hgext/highlight/__init__.py not using absolute_import
23 hgext/highlight/highlight.py not using absolute_import
23 hgext/highlight/highlight.py not using absolute_import
24 hgext/histedit.py not using absolute_import
24 hgext/histedit.py not using absolute_import
25 hgext/largefiles/__init__.py not using absolute_import
25 hgext/largefiles/__init__.py not using absolute_import
26 hgext/largefiles/basestore.py not using absolute_import
26 hgext/largefiles/basestore.py not using absolute_import
27 hgext/largefiles/lfcommands.py not using absolute_import
27 hgext/largefiles/lfcommands.py not using absolute_import
28 hgext/largefiles/lfutil.py not using absolute_import
28 hgext/largefiles/lfutil.py not using absolute_import
29 hgext/largefiles/localstore.py not using absolute_import
29 hgext/largefiles/localstore.py not using absolute_import
30 hgext/largefiles/overrides.py not using absolute_import
30 hgext/largefiles/overrides.py not using absolute_import
31 hgext/largefiles/proto.py not using absolute_import
31 hgext/largefiles/proto.py not using absolute_import
32 hgext/largefiles/remotestore.py not using absolute_import
32 hgext/largefiles/remotestore.py not using absolute_import
33 hgext/largefiles/reposetup.py not using absolute_import
33 hgext/largefiles/reposetup.py not using absolute_import
34 hgext/largefiles/uisetup.py not using absolute_import
34 hgext/largefiles/uisetup.py not using absolute_import
35 hgext/largefiles/wirestore.py not using absolute_import
35 hgext/largefiles/wirestore.py not using absolute_import
36 hgext/mq.py not using absolute_import
36 hgext/mq.py not using absolute_import
37 hgext/rebase.py not using absolute_import
37 hgext/rebase.py not using absolute_import
38 hgext/share.py not using absolute_import
38 hgext/share.py not using absolute_import
39 hgext/win32text.py not using absolute_import
39 hgext/win32text.py not using absolute_import
40 i18n/check-translation.py not using absolute_import
40 i18n/check-translation.py not using absolute_import
41 i18n/polib.py not using absolute_import
41 i18n/polib.py not using absolute_import
42 setup.py not using absolute_import
42 setup.py not using absolute_import
43 tests/heredoctest.py requires print_function
43 tests/heredoctest.py requires print_function
44 tests/killdaemons.py not using absolute_import
44 tests/killdaemons.py not using absolute_import
45 tests/md5sum.py not using absolute_import
45 tests/md5sum.py not using absolute_import
46 tests/mockblackbox.py not using absolute_import
46 tests/mockblackbox.py not using absolute_import
47 tests/printenv.py not using absolute_import
47 tests/printenv.py not using absolute_import
48 tests/readlink.py not using absolute_import
48 tests/readlink.py not using absolute_import
49 tests/readlink.py requires print_function
49 tests/readlink.py requires print_function
50 tests/revlog-formatv0.py not using absolute_import
50 tests/revlog-formatv0.py not using absolute_import
51 tests/run-tests.py not using absolute_import
51 tests/run-tests.py not using absolute_import
52 tests/sitecustomize.py not using absolute_import
52 tests/sitecustomize.py not using absolute_import
53 tests/svn-safe-append.py not using absolute_import
53 tests/svn-safe-append.py not using absolute_import
54 tests/svnxml.py not using absolute_import
54 tests/svnxml.py not using absolute_import
55 tests/test-atomictempfile.py not using absolute_import
55 tests/test-atomictempfile.py not using absolute_import
56 tests/test-demandimport.py not using absolute_import
56 tests/test-demandimport.py not using absolute_import
57 tests/test-demandimport.py requires print_function
57 tests/test-demandimport.py requires print_function
58 tests/test-doctest.py not using absolute_import
58 tests/test-doctest.py not using absolute_import
59 tests/test-hgwebdir-paths.py not using absolute_import
59 tests/test-hgwebdir-paths.py not using absolute_import
60 tests/test-lrucachedict.py not using absolute_import
60 tests/test-lrucachedict.py not using absolute_import
61 tests/test-lrucachedict.py requires print_function
61 tests/test-lrucachedict.py requires print_function
62 tests/test-manifest.py not using absolute_import
62 tests/test-manifest.py not using absolute_import
63 tests/test-pathencode.py not using absolute_import
63 tests/test-pathencode.py not using absolute_import
64 tests/test-pathencode.py requires print_function
64 tests/test-pathencode.py requires print_function
65 tests/test-run-tests.py not using absolute_import
65 tests/test-run-tests.py not using absolute_import
66 tests/test-simplemerge.py not using absolute_import
66 tests/test-simplemerge.py not using absolute_import
67 tests/test-symlink-os-yes-fs-no.py not using absolute_import
67 tests/test-symlink-os-yes-fs-no.py not using absolute_import
68 tests/test-trusted.py not using absolute_import
68 tests/test-trusted.py not using absolute_import
69 tests/test-trusted.py requires print_function
69 tests/test-trusted.py requires print_function
70 tests/test-ui-color.py not using absolute_import
70 tests/test-ui-color.py not using absolute_import
71 tests/test-url.py not using absolute_import
71 tests/test-url.py not using absolute_import
72
72
73 #if py3exe
73 #if py3exe
74 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
74 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
75 contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob)
75 contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob)
76 doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
76 doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
77 hgext/acl.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
77 hgext/acl.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
78 hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
78 hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
79 hgext/blackbox.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
79 hgext/blackbox.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
80 hgext/bugzilla.py: error importing module: <ImportError> No module named 'urlparse' (line *) (glob)
80 hgext/bugzilla.py: error importing module: <ImportError> No module named 'urlparse' (line *) (glob)
81 hgext/censor.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
81 hgext/censor.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
82 hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
82 hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
83 hgext/children.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
83 hgext/children.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
84 hgext/churn.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
84 hgext/churn.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
85 hgext/clonebundles.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
85 hgext/clonebundles.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
86 hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
86 hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
87 hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
87 hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
88 hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
88 hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
89 hgext/convert/convcmd.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
89 hgext/convert/convcmd.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
90 hgext/convert/cvs.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
90 hgext/convert/cvs.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
91 hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
91 hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
92 hgext/convert/darcs.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
92 hgext/convert/darcs.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
93 hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
93 hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
94 hgext/convert/git.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
94 hgext/convert/git.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
95 hgext/convert/gnuarch.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
95 hgext/convert/gnuarch.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
96 hgext/convert/hg.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
96 hgext/convert/hg.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
97 hgext/convert/monotone.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
97 hgext/convert/monotone.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
98 hgext/convert/p*.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
98 hgext/convert/p*.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
99 hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
99 hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
100 hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
100 hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
101 hgext/eol.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
101 hgext/eol.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
102 hgext/extdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
102 hgext/extdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
103 hgext/factotum.py: error importing: <ImportError> No module named 'cStringIO' (error at url.py:*) (glob)
103 hgext/factotum.py: error importing: <ImportError> No module named 'cStringIO' (error at url.py:*) (glob)
104 hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
104 hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
105 hgext/fsmonitor/state.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
105 hgext/fsmonitor/state.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
106 hgext/fsmonitor/watchmanclient.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
106 hgext/fsmonitor/watchmanclient.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
107 hgext/gpg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
107 hgext/gpg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
108 hgext/graphlog.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
108 hgext/graphlog.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
109 hgext/hgcia.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
109 hgext/hgcia.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
110 hgext/hgk.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
110 hgext/hgk.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
111 hgext/highlight/highlight.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
111 hgext/highlight/highlight.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
112 hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
112 hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
113 hgext/keyword.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
113 hgext/keyword.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
114 hgext/largefiles/basestore.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
114 hgext/largefiles/basestore.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
115 hgext/largefiles/lfcommands.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
115 hgext/largefiles/lfcommands.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
116 hgext/largefiles/lfutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
116 hgext/largefiles/lfutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
117 hgext/largefiles/localstore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
117 hgext/largefiles/localstore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
118 hgext/largefiles/overrides.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
118 hgext/largefiles/overrides.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
119 hgext/largefiles/proto.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
119 hgext/largefiles/proto.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
120 hgext/largefiles/remotestore.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
120 hgext/largefiles/remotestore.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
121 hgext/largefiles/reposetup.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
121 hgext/largefiles/reposetup.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
122 hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
122 hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
123 hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
123 hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
124 hgext/mq.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
124 hgext/mq.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
125 hgext/notify.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
125 hgext/notify.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
126 hgext/pager.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
126 hgext/pager.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
127 hgext/patchbomb.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
127 hgext/patchbomb.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
128 hgext/purge.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
128 hgext/purge.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
129 hgext/rebase.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
129 hgext/rebase.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
130 hgext/record.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
130 hgext/record.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
131 hgext/relink.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
131 hgext/relink.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
132 hgext/schemes.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
132 hgext/schemes.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
133 hgext/share.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
133 hgext/share.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
134 hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
134 hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
135 hgext/strip.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
135 hgext/strip.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
136 hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
136 hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
137 hgext/win*text.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
137 hgext/win*text.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
138 mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
138 mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
139 mercurial/bookmarks.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
139 mercurial/bookmarks.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
140 mercurial/branchmap.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
140 mercurial/branchmap.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
141 mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
141 mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
142 mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
142 mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
143 mercurial/byterange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
143 mercurial/byterange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
144 mercurial/changegroup.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
144 mercurial/changegroup.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
145 mercurial/changelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
145 mercurial/changelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
146 mercurial/cmdutil.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
146 mercurial/cmdutil.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
147 mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
147 mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
148 mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
148 mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
149 mercurial/config.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
149 mercurial/config.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
150 mercurial/context.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
150 mercurial/context.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
151 mercurial/copies.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
151 mercurial/copies.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
152 mercurial/crecord.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
152 mercurial/crecord.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
153 mercurial/destutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
153 mercurial/destutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
154 mercurial/dirstate.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
154 mercurial/dirstate.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
155 mercurial/discovery.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
155 mercurial/discovery.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
156 mercurial/dispatch.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
156 mercurial/dispatch.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
157 mercurial/exchange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
157 mercurial/exchange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
158 mercurial/extensions.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
158 mercurial/extensions.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
159 mercurial/filelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
159 mercurial/filelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
160 mercurial/filemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
160 mercurial/filemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
161 mercurial/fileset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
161 mercurial/fileset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
162 mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
162 mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
163 mercurial/graphmod.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
163 mercurial/graphmod.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
164 mercurial/help.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
164 mercurial/help.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
165 mercurial/hg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
165 mercurial/hg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
166 mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
166 mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
167 mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
167 mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
168 mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
168 mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
169 mercurial/hgweb/protocol.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
169 mercurial/hgweb/protocol.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
170 mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
170 mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
171 mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
171 mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
172 mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
172 mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
173 mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
173 mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
174 mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
174 mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
175 mercurial/hook.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
175 mercurial/hook.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
176 mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
176 mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
177 mercurial/httpconnection.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
177 mercurial/httpconnection.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
178 mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
178 mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
179 mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
179 mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
180 mercurial/localrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
180 mercurial/localrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
181 mercurial/lock.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
181 mercurial/lock.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
182 mercurial/mail.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
182 mercurial/mail.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
183 mercurial/manifest.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
183 mercurial/manifest.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
184 mercurial/match.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
184 mercurial/match.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
185 mercurial/mdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
185 mercurial/mdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
186 mercurial/merge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
186 mercurial/merge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
187 mercurial/minirst.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
187 mercurial/minirst.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
188 mercurial/namespaces.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
188 mercurial/namespaces.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
189 mercurial/obsolete.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
189 mercurial/obsolete.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
190 mercurial/patch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
190 mercurial/patch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
191 mercurial/pathutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
191 mercurial/pathutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
192 mercurial/peer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
192 mercurial/peer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
193 mercurial/pure/mpatch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
193 mercurial/pure/mpatch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
194 mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
194 mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
195 mercurial/pushkey.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
195 mercurial/pushkey.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
196 mercurial/pvec.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
196 mercurial/pvec.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
197 mercurial/registrar.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
197 mercurial/registrar.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
198 mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
198 mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
199 mercurial/repoview.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
199 mercurial/repoview.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
200 mercurial/revlog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
200 mercurial/revlog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
201 mercurial/revset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
201 mercurial/revset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
202 mercurial/scmutil.py: error importing module: <ImportError> No module named 'Queue' (line *) (glob)
202 mercurial/scmutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
203 mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
203 mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
204 mercurial/similar.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
204 mercurial/similar.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
205 mercurial/simplemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
205 mercurial/simplemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
206 mercurial/sshpeer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
206 mercurial/sshpeer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
207 mercurial/sshserver.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
207 mercurial/sshserver.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
208 mercurial/sslutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
208 mercurial/sslutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
209 mercurial/statichttprepo.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
209 mercurial/statichttprepo.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
210 mercurial/store.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
210 mercurial/store.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
211 mercurial/streamclone.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
211 mercurial/streamclone.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
212 mercurial/subrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
212 mercurial/subrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
213 mercurial/tagmerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
213 mercurial/tagmerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
214 mercurial/tags.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
214 mercurial/tags.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
215 mercurial/templatefilters.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
215 mercurial/templatefilters.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
216 mercurial/templatekw.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
216 mercurial/templatekw.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
217 mercurial/templater.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
217 mercurial/templater.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
218 mercurial/transaction.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
218 mercurial/transaction.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
219 mercurial/ui.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
219 mercurial/ui.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
220 mercurial/unionrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
220 mercurial/unionrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
221 mercurial/url.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
221 mercurial/url.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
222 mercurial/util.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
222 mercurial/util.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
223 mercurial/verify.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
223 mercurial/verify.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
224 mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
224 mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
225 mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
225 mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
226 mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
226 mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
227 tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
227 tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
228 tests/test-demandimport.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
228 tests/test-demandimport.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
229 tests/test-lrucachedict.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
229 tests/test-lrucachedict.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
230 tests/test-trusted.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
230 tests/test-trusted.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
231
231
232 #endif
232 #endif
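The two updated expectations above (test lines 202 and 211) show that after this change mercurial/scmutil.py and mercurial/streamclone.py no longer fail under Python 3 with "No module named 'Queue'"; like most of the other modules they now fail later, on the cStringIO import reached via parsers.py. That is consistent with the modules no longer importing the Python 2-only Queue module at top level and instead going through a shared compatibility alias. A minimal sketch of such an alias pattern follows; the names and the drain() helper are illustrative assumptions, not Mercurial's actual code.

# Illustrative sketch: expose queue/empty names that work on Python 2 and 3,
# so callers never import the Queue module directly.
try:
    import Queue as _queue      # Python 2 spelling of the stdlib module
except ImportError:
    import queue as _queue      # Python 3 spelling

queue = _queue.Queue            # alias callers use instead of Queue.Queue
empty = _queue.Empty            # alias callers use instead of Queue.Empty

def drain(q):
    # Hypothetical helper: return every item currently in q without blocking.
    items = []
    try:
        while True:
            items.append(q.get(block=False))
    except empty:
        pass
    return items

q = queue()
q.put('changeset')
assert drain(q) == ['changeset']

With an alias like this in place, a module that previously started with "import Queue" no longer trips the 'Queue' ImportError in the Python 3 import check, which is exactly the shift recorded in the expected output above.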