merge with stable
Augie Fackler
changeset r45643:3a6ec080 (merge, default branch)
hgdemandimport/__init__.py
@@ -1,81 +1,82 @@
1 # hgdemandimport - global demand-loading of modules for Mercurial
1 # hgdemandimport - global demand-loading of modules for Mercurial
2 #
2 #
3 # Copyright 2017 Facebook Inc.
3 # Copyright 2017 Facebook Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''demandimport - automatic demand-loading of modules'''
8 '''demandimport - automatic demand-loading of modules'''
9
9
10 # This is in a separate package from mercurial because in Python 3,
10 # This is in a separate package from mercurial because in Python 3,
11 # demand loading is per-package. Keeping demandimport in the mercurial package
11 # demand loading is per-package. Keeping demandimport in the mercurial package
12 # would disable demand loading for any modules in mercurial.
12 # would disable demand loading for any modules in mercurial.
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import os
16 import os
17 import sys
17 import sys
18
18
19 if sys.version_info[0] >= 3:
19 if sys.version_info[0] >= 3:
20 from . import demandimportpy3 as demandimport
20 from . import demandimportpy3 as demandimport
21 else:
21 else:
22 from . import demandimportpy2 as demandimport
22 from . import demandimportpy2 as demandimport
23
23
24 # Full module names which can't be lazy imported.
24 # Full module names which can't be lazy imported.
25 # Extensions can add to this set.
25 # Extensions can add to this set.
26 IGNORES = {
26 IGNORES = {
27 '__future__',
27 '__future__',
28 '_hashlib',
28 '_hashlib',
29 # ImportError during pkg_resources/__init__.py:fixup_namespace_package
29 # ImportError during pkg_resources/__init__.py:fixup_namespace_package
30 '_imp',
30 '_imp',
31 '_xmlplus',
31 '_xmlplus',
32 'fcntl',
32 'fcntl',
33 'nt', # pathlib2 tests the existence of built-in 'nt' module
33 'nt', # pathlib2 tests the existence of built-in 'nt' module
34 'win32com.gen_py',
34 'win32com.gen_py',
35 'win32com.shell', # 'appdirs' tries to import win32com.shell
35 'win32com.shell', # 'appdirs' tries to import win32com.shell
36 '_winreg', # 2.7 mimetypes needs immediate ImportError
36 '_winreg', # 2.7 mimetypes needs immediate ImportError
37 'pythoncom',
37 'pythoncom',
38 # imported by tarfile, not available under Windows
38 # imported by tarfile, not available under Windows
39 'pwd',
39 'pwd',
40 'grp',
40 'grp',
41 # imported by profile, itself imported by hotshot.stats,
41 # imported by profile, itself imported by hotshot.stats,
42 # not available under Windows
42 # not available under Windows
43 'resource',
43 'resource',
44 # this trips up many extension authors
44 # this trips up many extension authors
45 'gtk',
45 'gtk',
46 # setuptools' pkg_resources.py expects "from __main__ import x" to
46 # setuptools' pkg_resources.py expects "from __main__ import x" to
47 # raise ImportError if x not defined
47 # raise ImportError if x not defined
48 '__main__',
48 '__main__',
49 '_ssl', # conditional imports in the stdlib, issue1964
49 '_ssl', # conditional imports in the stdlib, issue1964
50 '_sre', # issue4920
50 '_sre', # issue4920
51 'rfc822',
51 'rfc822',
52 'mimetools',
52 'mimetools',
53 'sqlalchemy.events', # has import-time side effects (issue5085)
53 'sqlalchemy.events', # has import-time side effects (issue5085)
54 # setuptools 8 expects this module to explode early when not on windows
54 # setuptools 8 expects this module to explode early when not on windows
55 'distutils.msvc9compiler',
55 'distutils.msvc9compiler',
56 '__builtin__',
56 '__builtin__',
57 'builtins',
57 'builtins',
58 'urwid.command_map', # for pudb
58 'urwid.command_map', # for pudb
59 'lzma',
59 }
60 }
60
61
61 _pypy = '__pypy__' in sys.builtin_module_names
62 _pypy = '__pypy__' in sys.builtin_module_names
62
63
63 if _pypy:
64 if _pypy:
64 # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
65 # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
65 IGNORES.add('_ctypes.pointer')
66 IGNORES.add('_ctypes.pointer')
66
67
67 demandimport.init(IGNORES)
68 demandimport.init(IGNORES)
68
69
69 # Re-export.
70 # Re-export.
70 isenabled = demandimport.isenabled
71 isenabled = demandimport.isenabled
71 disable = demandimport.disable
72 disable = demandimport.disable
72 deactivated = demandimport.deactivated
73 deactivated = demandimport.deactivated
73
74
74
75
75 def enable():
76 def enable():
76 # chg pre-imports modules so do not enable demandimport for it
77 # chg pre-imports modules so do not enable demandimport for it
77 if (
78 if (
78 'CHGINTERNALMARK' not in os.environ
79 'CHGINTERNALMARK' not in os.environ
79 and os.environ.get('HGDEMANDIMPORT') != 'disable'
80 and os.environ.get('HGDEMANDIMPORT') != 'disable'
80 ):
81 ):
81 demandimport.enable()
82 demandimport.enable()
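
For orientation, a hedged sketch of how this module is typically consumed by an extension or embedding script; it is a standalone illustration, not part of the file above, and the module name added to IGNORES is invented:

# Assumes Mercurial (and therefore the hgdemandimport package) is importable.
import hgdemandimport

# Extensions add names here to opt modules with import-time side effects
# out of lazy loading; 'examplepkg.sideeffects' is a hypothetical name.
hgdemandimport.IGNORES.add('examplepkg.sideeffects')

hgdemandimport.enable()            # no-op under chg or HGDEMANDIMPORT=disable
print(hgdemandimport.isenabled())

with hgdemandimport.deactivated():
    # Inside this block imports happen eagerly instead of via lazy proxies.
    import json
    json.loads('{}')               # resolved immediately, not on first use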
mercurial/archival.py
@@ -1,390 +1,395 @@
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import gzip
10 import gzip
11 import os
11 import os
12 import struct
12 import struct
13 import tarfile
13 import tarfile
14 import time
14 import time
15 import zipfile
15 import zipfile
16 import zlib
16 import zlib
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import nullrev
19 from .node import nullrev
20 from .pycompat import open
20 from .pycompat import open
21
21
22 from . import (
22 from . import (
23 error,
23 error,
24 formatter,
24 formatter,
25 match as matchmod,
25 match as matchmod,
26 pycompat,
26 pycompat,
27 scmutil,
27 scmutil,
28 util,
28 util,
29 vfs as vfsmod,
29 vfs as vfsmod,
30 )
30 )
31
31
32 stringio = util.stringio
32 stringio = util.stringio
33
33
34 # from unzip source code:
34 # from unzip source code:
35 _UNX_IFREG = 0x8000
35 _UNX_IFREG = 0x8000
36 _UNX_IFLNK = 0xA000
36 _UNX_IFLNK = 0xA000
37
37
38
38
39 def tidyprefix(dest, kind, prefix):
39 def tidyprefix(dest, kind, prefix):
40 '''choose prefix to use for names in archive. make sure prefix is
40 '''choose prefix to use for names in archive. make sure prefix is
41 safe for consumers.'''
41 safe for consumers.'''
42
42
43 if prefix:
43 if prefix:
44 prefix = util.normpath(prefix)
44 prefix = util.normpath(prefix)
45 else:
45 else:
46 if not isinstance(dest, bytes):
46 if not isinstance(dest, bytes):
47 raise ValueError(b'dest must be string if no prefix')
47 raise ValueError(b'dest must be string if no prefix')
48 prefix = os.path.basename(dest)
48 prefix = os.path.basename(dest)
49 lower = prefix.lower()
49 lower = prefix.lower()
50 for sfx in exts.get(kind, []):
50 for sfx in exts.get(kind, []):
51 if lower.endswith(sfx):
51 if lower.endswith(sfx):
52 prefix = prefix[: -len(sfx)]
52 prefix = prefix[: -len(sfx)]
53 break
53 break
54 lpfx = os.path.normpath(util.localpath(prefix))
54 lpfx = os.path.normpath(util.localpath(prefix))
55 prefix = util.pconvert(lpfx)
55 prefix = util.pconvert(lpfx)
56 if not prefix.endswith(b'/'):
56 if not prefix.endswith(b'/'):
57 prefix += b'/'
57 prefix += b'/'
58 # Drop the leading '.' path component if present, so Windows can read the
58 # Drop the leading '.' path component if present, so Windows can read the
59 # zip files (issue4634)
59 # zip files (issue4634)
60 if prefix.startswith(b'./'):
60 if prefix.startswith(b'./'):
61 prefix = prefix[2:]
61 prefix = prefix[2:]
62 if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
62 if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
63 raise error.Abort(_(b'archive prefix contains illegal components'))
63 raise error.Abort(_(b'archive prefix contains illegal components'))
64 return prefix
64 return prefix
65
65
66
66
67 exts = {
67 exts = {
68 b'tar': [b'.tar'],
68 b'tar': [b'.tar'],
69 b'tbz2': [b'.tbz2', b'.tar.bz2'],
69 b'tbz2': [b'.tbz2', b'.tar.bz2'],
70 b'tgz': [b'.tgz', b'.tar.gz'],
70 b'tgz': [b'.tgz', b'.tar.gz'],
71 b'zip': [b'.zip'],
71 b'zip': [b'.zip'],
72 b'txz': [b'.txz', b'.tar.xz'],
72 b'txz': [b'.txz', b'.tar.xz'],
73 }
73 }
74
74
75
75
76 def guesskind(dest):
76 def guesskind(dest):
77 for kind, extensions in pycompat.iteritems(exts):
77 for kind, extensions in pycompat.iteritems(exts):
78 if any(dest.endswith(ext) for ext in extensions):
78 if any(dest.endswith(ext) for ext in extensions):
79 return kind
79 return kind
80 return None
80 return None
81
81
82
82
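
A hedged, standalone illustration of how guesskind() maps destination names to archive kinds; the file names are invented:

from mercurial.archival import guesskind

# Destinations are byte strings, matching the byte-string keys of 'exts'.
for dest in (b'src-1.0.tar.gz', b'src-1.0.tbz2', b'src-1.0.zip', b'notes.txt'):
    print(dest, guesskind(dest))   # b'tgz', b'tbz2', b'zip', then None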
83 def _rootctx(repo):
83 def _rootctx(repo):
84 # repo[0] may be hidden
84 # repo[0] may be hidden
85 for rev in repo:
85 for rev in repo:
86 return repo[rev]
86 return repo[rev]
87 return repo[nullrev]
87 return repo[nullrev]
88
88
89
89
90 # {tags} on ctx includes local tags and 'tip', with no current way to limit
90 # {tags} on ctx includes local tags and 'tip', with no current way to limit
91 # that to global tags. Therefore, use {latesttag} as a substitute when
91 # that to global tags. Therefore, use {latesttag} as a substitute when
92 # the distance is 0, since that will be the list of global tags on ctx.
92 # the distance is 0, since that will be the list of global tags on ctx.
93 _defaultmetatemplate = br'''
93 _defaultmetatemplate = br'''
94 repo: {root}
94 repo: {root}
95 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
95 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
96 branch: {branch|utf8}
96 branch: {branch|utf8}
97 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
97 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
98 separate("\n",
98 separate("\n",
99 join(latesttag % "latesttag: {tag}", "\n"),
99 join(latesttag % "latesttag: {tag}", "\n"),
100 "latesttagdistance: {latesttagdistance}",
100 "latesttagdistance: {latesttagdistance}",
101 "changessincelatesttag: {changessincelatesttag}"))}
101 "changessincelatesttag: {changessincelatesttag}"))}
102 '''[
102 '''[
103 1:
103 1:
104 ] # drop leading '\n'
104 ] # drop leading '\n'
105
105
106
106
107 def buildmetadata(ctx):
107 def buildmetadata(ctx):
108 '''build content of .hg_archival.txt'''
108 '''build content of .hg_archival.txt'''
109 repo = ctx.repo()
109 repo = ctx.repo()
110
110
111 opts = {
111 opts = {
112 b'template': repo.ui.config(
112 b'template': repo.ui.config(
113 b'experimental', b'archivemetatemplate', _defaultmetatemplate
113 b'experimental', b'archivemetatemplate', _defaultmetatemplate
114 )
114 )
115 }
115 }
116
116
117 out = util.stringio()
117 out = util.stringio()
118
118
119 fm = formatter.formatter(repo.ui, out, b'archive', opts)
119 fm = formatter.formatter(repo.ui, out, b'archive', opts)
120 fm.startitem()
120 fm.startitem()
121 fm.context(ctx=ctx)
121 fm.context(ctx=ctx)
122 fm.data(root=_rootctx(repo).hex())
122 fm.data(root=_rootctx(repo).hex())
123
123
124 if ctx.rev() is None:
124 if ctx.rev() is None:
125 dirty = b''
125 dirty = b''
126 if ctx.dirty(missing=True):
126 if ctx.dirty(missing=True):
127 dirty = b'+'
127 dirty = b'+'
128 fm.data(dirty=dirty)
128 fm.data(dirty=dirty)
129 fm.end()
129 fm.end()
130
130
131 return out.getvalue()
131 return out.getvalue()
132
132
133
133
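
A hedged sketch of rendering this metadata on its own, outside an archive operation; the repository path is hypothetical:

from mercurial import hg, ui as uimod
from mercurial.archival import buildmetadata

# Open a local repository (path invented) and render .hg_archival.txt
# for its working directory context; the result is a bytes blob.
repo = hg.repository(uimod.ui.load(), b'/path/to/repo')
print(buildmetadata(repo[None]).decode('utf-8'))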
134 class tarit(object):
134 class tarit(object):
135 '''write archive to tar file or stream. can write uncompressed,
135 '''write archive to tar file or stream. can write uncompressed,
136 or compress with gzip or bzip2.'''
136 or compress with gzip or bzip2.'''
137
137
138 if pycompat.ispy3:
138 if pycompat.ispy3:
139 GzipFileWithTime = gzip.GzipFile # camelcase-required
139 GzipFileWithTime = gzip.GzipFile # camelcase-required
140 else:
140 else:
141
141
142 class GzipFileWithTime(gzip.GzipFile):
142 class GzipFileWithTime(gzip.GzipFile):
143 def __init__(self, *args, **kw):
143 def __init__(self, *args, **kw):
144 timestamp = None
144 timestamp = None
145 if 'mtime' in kw:
145 if 'mtime' in kw:
146 timestamp = kw.pop('mtime')
146 timestamp = kw.pop('mtime')
147 if timestamp is None:
147 if timestamp is None:
148 self.timestamp = time.time()
148 self.timestamp = time.time()
149 else:
149 else:
150 self.timestamp = timestamp
150 self.timestamp = timestamp
151 gzip.GzipFile.__init__(self, *args, **kw)
151 gzip.GzipFile.__init__(self, *args, **kw)
152
152
153 def _write_gzip_header(self):
153 def _write_gzip_header(self):
154 self.fileobj.write(b'\037\213') # magic header
154 self.fileobj.write(b'\037\213') # magic header
155 self.fileobj.write(b'\010') # compression method
155 self.fileobj.write(b'\010') # compression method
156 fname = self.name
156 fname = self.name
157 if fname and fname.endswith(b'.gz'):
157 if fname and fname.endswith(b'.gz'):
158 fname = fname[:-3]
158 fname = fname[:-3]
159 flags = 0
159 flags = 0
160 if fname:
160 if fname:
161 flags = gzip.FNAME # pytype: disable=module-attr
161 flags = gzip.FNAME # pytype: disable=module-attr
162 self.fileobj.write(pycompat.bytechr(flags))
162 self.fileobj.write(pycompat.bytechr(flags))
163 gzip.write32u( # pytype: disable=module-attr
163 gzip.write32u( # pytype: disable=module-attr
164 self.fileobj, int(self.timestamp)
164 self.fileobj, int(self.timestamp)
165 )
165 )
166 self.fileobj.write(b'\002')
166 self.fileobj.write(b'\002')
167 self.fileobj.write(b'\377')
167 self.fileobj.write(b'\377')
168 if fname:
168 if fname:
169 self.fileobj.write(fname + b'\000')
169 self.fileobj.write(fname + b'\000')
170
170
171 def __init__(self, dest, mtime, kind=b''):
171 def __init__(self, dest, mtime, kind=b''):
172 self.mtime = mtime
172 self.mtime = mtime
173 self.fileobj = None
173 self.fileobj = None
174
174
175 def taropen(mode, name=b'', fileobj=None):
175 def taropen(mode, name=b'', fileobj=None):
176 if kind == b'gz':
176 if kind == b'gz':
177 mode = mode[0:1]
177 mode = mode[0:1]
178 if not fileobj:
178 if not fileobj:
179 fileobj = open(name, mode + b'b')
179 fileobj = open(name, mode + b'b')
180 gzfileobj = self.GzipFileWithTime(
180 gzfileobj = self.GzipFileWithTime(
181 name,
181 name,
182 pycompat.sysstr(mode + b'b'),
182 pycompat.sysstr(mode + b'b'),
183 zlib.Z_BEST_COMPRESSION,
183 zlib.Z_BEST_COMPRESSION,
184 fileobj,
184 fileobj,
185 mtime=mtime,
185 mtime=mtime,
186 )
186 )
187 self.fileobj = gzfileobj
187 self.fileobj = gzfileobj
188 return tarfile.TarFile.taropen( # pytype: disable=attribute-error
188 return tarfile.TarFile.taropen( # pytype: disable=attribute-error
189 name, pycompat.sysstr(mode), gzfileobj
189 name, pycompat.sysstr(mode), gzfileobj
190 )
190 )
191 else:
191 else:
192 return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj)
192 try:
193 return tarfile.open(
194 name, pycompat.sysstr(mode + kind), fileobj
195 )
196 except tarfile.CompressionError as e:
197 raise error.Abort(pycompat.bytestr(e))
193
198
194 if isinstance(dest, bytes):
199 if isinstance(dest, bytes):
195 self.z = taropen(b'w:', name=dest)
200 self.z = taropen(b'w:', name=dest)
196 else:
201 else:
197 self.z = taropen(b'w|', fileobj=dest)
202 self.z = taropen(b'w|', fileobj=dest)
198
203
199 def addfile(self, name, mode, islink, data):
204 def addfile(self, name, mode, islink, data):
200 name = pycompat.fsdecode(name)
205 name = pycompat.fsdecode(name)
201 i = tarfile.TarInfo(name)
206 i = tarfile.TarInfo(name)
202 i.mtime = self.mtime
207 i.mtime = self.mtime
203 i.size = len(data)
208 i.size = len(data)
204 if islink:
209 if islink:
205 i.type = tarfile.SYMTYPE
210 i.type = tarfile.SYMTYPE
206 i.mode = 0o777
211 i.mode = 0o777
207 i.linkname = pycompat.fsdecode(data)
212 i.linkname = pycompat.fsdecode(data)
208 data = None
213 data = None
209 i.size = 0
214 i.size = 0
210 else:
215 else:
211 i.mode = mode
216 i.mode = mode
212 data = stringio(data)
217 data = stringio(data)
213 self.z.addfile(i, data)
218 self.z.addfile(i, data)
214
219
215 def done(self):
220 def done(self):
216 self.z.close()
221 self.z.close()
217 if self.fileobj:
222 if self.fileobj:
218 self.fileobj.close()
223 self.fileobj.close()
219
224
220
225
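
A minimal, hedged usage sketch for the tarit archiver above; the output name and file contents are invented:

import time

from mercurial.archival import tarit

archiver = tarit(b'example.tar.gz', int(time.time()), kind=b'gz')
archiver.addfile(b'hello.txt', 0o644, False, b'hello archive\n')
archiver.addfile(b'link-to-hello', 0o777, True, b'hello.txt')  # symlink entry
archiver.done()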
221 class zipit(object):
226 class zipit(object):
222 '''write archive to zip file or stream. can write uncompressed,
227 '''write archive to zip file or stream. can write uncompressed,
223 or compressed with deflate.'''
228 or compressed with deflate.'''
224
229
225 def __init__(self, dest, mtime, compress=True):
230 def __init__(self, dest, mtime, compress=True):
226 if isinstance(dest, bytes):
231 if isinstance(dest, bytes):
227 dest = pycompat.fsdecode(dest)
232 dest = pycompat.fsdecode(dest)
228 self.z = zipfile.ZipFile(
233 self.z = zipfile.ZipFile(
229 dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
234 dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
230 )
235 )
231
236
232 # Python's zipfile module emits deprecation warnings if we try
237 # Python's zipfile module emits deprecation warnings if we try
233 # to store files with a date before 1980.
238 # to store files with a date before 1980.
234 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
239 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
235 if mtime < epoch:
240 if mtime < epoch:
236 mtime = epoch
241 mtime = epoch
237
242
238 self.mtime = mtime
243 self.mtime = mtime
239 self.date_time = time.gmtime(mtime)[:6]
244 self.date_time = time.gmtime(mtime)[:6]
240
245
241 def addfile(self, name, mode, islink, data):
246 def addfile(self, name, mode, islink, data):
242 i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
247 i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
243 i.compress_type = self.z.compression # pytype: disable=attribute-error
248 i.compress_type = self.z.compression # pytype: disable=attribute-error
244 # unzip will not honor unix file modes unless file creator is
249 # unzip will not honor unix file modes unless file creator is
245 # set to unix (id 3).
250 # set to unix (id 3).
246 i.create_system = 3
251 i.create_system = 3
247 ftype = _UNX_IFREG
252 ftype = _UNX_IFREG
248 if islink:
253 if islink:
249 mode = 0o777
254 mode = 0o777
250 ftype = _UNX_IFLNK
255 ftype = _UNX_IFLNK
251 i.external_attr = (mode | ftype) << 16
256 i.external_attr = (mode | ftype) << 16
252 # add "extended-timestamp" extra block, because zip archives
257 # add "extended-timestamp" extra block, because zip archives
253 # without this will be extracted with unexpected timestamp,
258 # without this will be extracted with unexpected timestamp,
254 # if TZ is not configured as GMT
259 # if TZ is not configured as GMT
255 i.extra += struct.pack(
260 i.extra += struct.pack(
256 b'<hhBl',
261 b'<hhBl',
257 0x5455, # block type: "extended-timestamp"
262 0x5455, # block type: "extended-timestamp"
258 1 + 4, # size of this block
263 1 + 4, # size of this block
259 1, # "modification time is present"
264 1, # "modification time is present"
260 int(self.mtime),
265 int(self.mtime),
261 ) # last modification (UTC)
266 ) # last modification (UTC)
262 self.z.writestr(i, data)
267 self.z.writestr(i, data)
263
268
264 def done(self):
269 def done(self):
265 self.z.close()
270 self.z.close()
266
271
267
272
268 class fileit(object):
273 class fileit(object):
269 '''write archive as files in directory.'''
274 '''write archive as files in directory.'''
270
275
271 def __init__(self, name, mtime):
276 def __init__(self, name, mtime):
272 self.basedir = name
277 self.basedir = name
273 self.opener = vfsmod.vfs(self.basedir)
278 self.opener = vfsmod.vfs(self.basedir)
274 self.mtime = mtime
279 self.mtime = mtime
275
280
276 def addfile(self, name, mode, islink, data):
281 def addfile(self, name, mode, islink, data):
277 if islink:
282 if islink:
278 self.opener.symlink(data, name)
283 self.opener.symlink(data, name)
279 return
284 return
280 f = self.opener(name, b"w", atomictemp=False)
285 f = self.opener(name, b"w", atomictemp=False)
281 f.write(data)
286 f.write(data)
282 f.close()
287 f.close()
283 destfile = os.path.join(self.basedir, name)
288 destfile = os.path.join(self.basedir, name)
284 os.chmod(destfile, mode)
289 os.chmod(destfile, mode)
285 if self.mtime is not None:
290 if self.mtime is not None:
286 os.utime(destfile, (self.mtime, self.mtime))
291 os.utime(destfile, (self.mtime, self.mtime))
287
292
288 def done(self):
293 def done(self):
289 pass
294 pass
290
295
291
296
292 archivers = {
297 archivers = {
293 b'files': fileit,
298 b'files': fileit,
294 b'tar': tarit,
299 b'tar': tarit,
295 b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
300 b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
296 b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
301 b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
297 b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
302 b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
298 b'uzip': lambda name, mtime: zipit(name, mtime, False),
303 b'uzip': lambda name, mtime: zipit(name, mtime, False),
299 b'zip': zipit,
304 b'zip': zipit,
300 }
305 }
301
306
302
307
303 def archive(
308 def archive(
304 repo,
309 repo,
305 dest,
310 dest,
306 node,
311 node,
307 kind,
312 kind,
308 decode=True,
313 decode=True,
309 match=None,
314 match=None,
310 prefix=b'',
315 prefix=b'',
311 mtime=None,
316 mtime=None,
312 subrepos=False,
317 subrepos=False,
313 ):
318 ):
314 '''create archive of repo as it was at node.
319 '''create archive of repo as it was at node.
315
320
316 dest can be name of directory, name of archive file, or file
321 dest can be name of directory, name of archive file, or file
317 object to write archive to.
322 object to write archive to.
318
323
319 kind is type of archive to create.
324 kind is type of archive to create.
320
325
321 decode tells whether to put files through decode filters from
326 decode tells whether to put files through decode filters from
322 hgrc.
327 hgrc.
323
328
324 match is a matcher to filter names of files to write to archive.
329 match is a matcher to filter names of files to write to archive.
325
330
326 prefix is name of path to put before every archive member.
331 prefix is name of path to put before every archive member.
327
332
328 mtime is the modified time, in seconds, or None to use the changeset time.
333 mtime is the modified time, in seconds, or None to use the changeset time.
329
334
330 subrepos tells whether to include subrepos.
335 subrepos tells whether to include subrepos.
331 '''
336 '''
332
337
333 if kind == b'txz' and not pycompat.ispy3:
338 if kind == b'txz' and not pycompat.ispy3:
334 raise error.Abort(_(b'xz compression is only available in Python 3'))
339 raise error.Abort(_(b'xz compression is only available in Python 3'))
335
340
336 if kind == b'files':
341 if kind == b'files':
337 if prefix:
342 if prefix:
338 raise error.Abort(_(b'cannot give prefix when archiving to files'))
343 raise error.Abort(_(b'cannot give prefix when archiving to files'))
339 else:
344 else:
340 prefix = tidyprefix(dest, kind, prefix)
345 prefix = tidyprefix(dest, kind, prefix)
341
346
342 def write(name, mode, islink, getdata):
347 def write(name, mode, islink, getdata):
343 data = getdata()
348 data = getdata()
344 if decode:
349 if decode:
345 data = repo.wwritedata(name, data)
350 data = repo.wwritedata(name, data)
346 archiver.addfile(prefix + name, mode, islink, data)
351 archiver.addfile(prefix + name, mode, islink, data)
347
352
348 if kind not in archivers:
353 if kind not in archivers:
349 raise error.Abort(_(b"unknown archive type '%s'") % kind)
354 raise error.Abort(_(b"unknown archive type '%s'") % kind)
350
355
351 ctx = repo[node]
356 ctx = repo[node]
352 archiver = archivers[kind](dest, mtime or ctx.date()[0])
357 archiver = archivers[kind](dest, mtime or ctx.date()[0])
353
358
354 if not match:
359 if not match:
355 match = scmutil.matchall(repo)
360 match = scmutil.matchall(repo)
356
361
357 if repo.ui.configbool(b"ui", b"archivemeta"):
362 if repo.ui.configbool(b"ui", b"archivemeta"):
358 name = b'.hg_archival.txt'
363 name = b'.hg_archival.txt'
359 if match(name):
364 if match(name):
360 write(name, 0o644, False, lambda: buildmetadata(ctx))
365 write(name, 0o644, False, lambda: buildmetadata(ctx))
361
366
362 files = list(ctx.manifest().walk(match))
367 files = list(ctx.manifest().walk(match))
363 total = len(files)
368 total = len(files)
364 if total:
369 if total:
365 files.sort()
370 files.sort()
366 scmutil.prefetchfiles(
371 scmutil.prefetchfiles(
367 repo, [(ctx.rev(), scmutil.matchfiles(repo, files))]
372 repo, [(ctx.rev(), scmutil.matchfiles(repo, files))]
368 )
373 )
369 progress = repo.ui.makeprogress(
374 progress = repo.ui.makeprogress(
370 _(b'archiving'), unit=_(b'files'), total=total
375 _(b'archiving'), unit=_(b'files'), total=total
371 )
376 )
372 progress.update(0)
377 progress.update(0)
373 for f in files:
378 for f in files:
374 ff = ctx.flags(f)
379 ff = ctx.flags(f)
375 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
380 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
376 progress.increment(item=f)
381 progress.increment(item=f)
377 progress.complete()
382 progress.complete()
378
383
379 if subrepos:
384 if subrepos:
380 for subpath in sorted(ctx.substate):
385 for subpath in sorted(ctx.substate):
381 sub = ctx.workingsub(subpath)
386 sub = ctx.workingsub(subpath)
382 submatch = matchmod.subdirmatcher(subpath, match)
387 submatch = matchmod.subdirmatcher(subpath, match)
383 subprefix = prefix + subpath + b'/'
388 subprefix = prefix + subpath + b'/'
384 total += sub.archive(archiver, subprefix, submatch, decode)
389 total += sub.archive(archiver, subprefix, submatch, decode)
385
390
386 if total == 0:
391 if total == 0:
387 raise error.Abort(_(b'no files match the archive pattern'))
392 raise error.Abort(_(b'no files match the archive pattern'))
388
393
389 archiver.done()
394 archiver.done()
390 return total
395 return total
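
Tying the pieces together, a hedged sketch of calling archive() directly, roughly what the `hg archive` command does; all paths are hypothetical:

from mercurial import archival, hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'/path/to/repo')
archival.archive(
    repo,
    b'/tmp/snapshot.tar.gz',   # dest; kind below selects the tgz archiver
    b'tip',                    # revision to archive
    kind=b'tgz',
    prefix=b'snapshot/',       # path prepended to every archive member
)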
tests/hghave.py
@@ -1,1063 +1,1074 @@
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import distutils.version
3 import distutils.version
4 import os
4 import os
5 import re
5 import re
6 import socket
6 import socket
7 import stat
7 import stat
8 import subprocess
8 import subprocess
9 import sys
9 import sys
10 import tempfile
10 import tempfile
11
11
12 tempprefix = 'hg-hghave-'
12 tempprefix = 'hg-hghave-'
13
13
14 checks = {
14 checks = {
15 "true": (lambda: True, "yak shaving"),
15 "true": (lambda: True, "yak shaving"),
16 "false": (lambda: False, "nail clipper"),
16 "false": (lambda: False, "nail clipper"),
17 }
17 }
18
18
19 try:
19 try:
20 import msvcrt
20 import msvcrt
21
21
22 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
22 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
23 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
23 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
24 except ImportError:
24 except ImportError:
25 pass
25 pass
26
26
27 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
27 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
28 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
28 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
29
29
30 if sys.version_info[0] >= 3:
30 if sys.version_info[0] >= 3:
31
31
32 def _sys2bytes(p):
32 def _sys2bytes(p):
33 if p is None:
33 if p is None:
34 return p
34 return p
35 return p.encode('utf-8')
35 return p.encode('utf-8')
36
36
37 def _bytes2sys(p):
37 def _bytes2sys(p):
38 if p is None:
38 if p is None:
39 return p
39 return p
40 return p.decode('utf-8')
40 return p.decode('utf-8')
41
41
42
42
43 else:
43 else:
44
44
45 def _sys2bytes(p):
45 def _sys2bytes(p):
46 return p
46 return p
47
47
48 _bytes2sys = _sys2bytes
48 _bytes2sys = _sys2bytes
49
49
50
50
51 def check(name, desc):
51 def check(name, desc):
52 """Registers a check function for a feature."""
52 """Registers a check function for a feature."""
53
53
54 def decorator(func):
54 def decorator(func):
55 checks[name] = (func, desc)
55 checks[name] = (func, desc)
56 return func
56 return func
57
57
58 return decorator
58 return decorator
59
59
60
60
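
A hedged example of declaring an additional requirement with the check() decorator above; the probed tool is fictional, and tests/hghave.py is assumed to be importable as hghave:

from hghave import check, matchoutput

@check("frobnicate", "frobnicate command line tool")   # hypothetical feature
def has_frobnicate():
    # matchoutput() (defined later in this file) runs the command in a shell
    # and matches its combined output against a bytes regular expression.
    return matchoutput('frobnicate --version 2>&1', br'frobnicate \d+\.\d+')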
61 def checkvers(name, desc, vers):
61 def checkvers(name, desc, vers):
62 """Registers a check function for each of a series of versions.
62 """Registers a check function for each of a series of versions.
63
63
64 vers can be a list or an iterator.
64 vers can be a list or an iterator.
65
65
66 Produces a series of feature checks that have the form <name><vers> without
66 Produces a series of feature checks that have the form <name><vers> without
67 any punctuation (even if there's punctuation in 'vers'; i.e. this produces
67 any punctuation (even if there's punctuation in 'vers'; i.e. this produces
68 'py38', not 'py3.8' or 'py-38')."""
68 'py38', not 'py3.8' or 'py-38')."""
69
69
70 def decorator(func):
70 def decorator(func):
71 def funcv(v):
71 def funcv(v):
72 def f():
72 def f():
73 return func(v)
73 return func(v)
74
74
75 return f
75 return f
76
76
77 for v in vers:
77 for v in vers:
78 v = str(v)
78 v = str(v)
79 f = funcv(v)
79 f = funcv(v)
80 checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
80 checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
81 return func
81 return func
82
82
83 return decorator
83 return decorator
84
84
85
85
86 def checkfeatures(features):
86 def checkfeatures(features):
87 result = {
87 result = {
88 'error': [],
88 'error': [],
89 'missing': [],
89 'missing': [],
90 'skipped': [],
90 'skipped': [],
91 }
91 }
92
92
93 for feature in features:
93 for feature in features:
94 negate = feature.startswith('no-')
94 negate = feature.startswith('no-')
95 if negate:
95 if negate:
96 feature = feature[3:]
96 feature = feature[3:]
97
97
98 if feature not in checks:
98 if feature not in checks:
99 result['missing'].append(feature)
99 result['missing'].append(feature)
100 continue
100 continue
101
101
102 check, desc = checks[feature]
102 check, desc = checks[feature]
103 try:
103 try:
104 available = check()
104 available = check()
105 except Exception:
105 except Exception:
106 result['error'].append('hghave check failed: %s' % feature)
106 result['error'].append('hghave check failed: %s' % feature)
107 continue
107 continue
108
108
109 if not negate and not available:
109 if not negate and not available:
110 result['skipped'].append('missing feature: %s' % desc)
110 result['skipped'].append('missing feature: %s' % desc)
111 elif negate and available:
111 elif negate and available:
112 result['skipped'].append('system supports %s' % desc)
112 result['skipped'].append('system supports %s' % desc)
113
113
114 return result
114 return result
115
115
116
116
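
A hedged example of what checkfeatures() returns for the two built-in checks registered at the top of this file plus one unknown name:

from hghave import checkfeatures

result = checkfeatures(['true', 'no-false', 'not-a-real-feature'])
# 'true' passes, 'no-false' passes (negated), the unknown name is reported:
# {'error': [], 'missing': ['not-a-real-feature'], 'skipped': []}
print(result)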
117 def require(features):
117 def require(features):
118 """Require that features are available, exiting if not."""
118 """Require that features are available, exiting if not."""
119 result = checkfeatures(features)
119 result = checkfeatures(features)
120
120
121 for missing in result['missing']:
121 for missing in result['missing']:
122 stderr.write(
122 stderr.write(
123 ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
123 ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
124 )
124 )
125 for msg in result['skipped']:
125 for msg in result['skipped']:
126 stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
126 stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
127 for msg in result['error']:
127 for msg in result['error']:
128 stderr.write(('%s\n' % msg).encode('utf-8'))
128 stderr.write(('%s\n' % msg).encode('utf-8'))
129
129
130 if result['missing']:
130 if result['missing']:
131 sys.exit(2)
131 sys.exit(2)
132
132
133 if result['skipped'] or result['error']:
133 if result['skipped'] or result['error']:
134 sys.exit(1)
134 sys.exit(1)
135
135
136
136
137 def matchoutput(cmd, regexp, ignorestatus=False):
137 def matchoutput(cmd, regexp, ignorestatus=False):
138 """Return the match object if cmd executes successfully and its output
138 """Return the match object if cmd executes successfully and its output
139 is matched by the supplied regular expression.
139 is matched by the supplied regular expression.
140 """
140 """
141 r = re.compile(regexp)
141 r = re.compile(regexp)
142 p = subprocess.Popen(
142 p = subprocess.Popen(
143 cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
143 cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
144 )
144 )
145 s = p.communicate()[0]
145 s = p.communicate()[0]
146 ret = p.returncode
146 ret = p.returncode
147 return (ignorestatus or not ret) and r.search(s)
147 return (ignorestatus or not ret) and r.search(s)
148
148
149
149
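
An illustrative direct call to matchoutput(); the pattern must be a bytes regex because the child's output is read as bytes, and a python3 executable on PATH is assumed:

from hghave import matchoutput

m = matchoutput('python3 --version 2>&1', br'Python (\d+)\.(\d+)')
if m:
    print('Python %s.%s detected' % (m.group(1).decode(), m.group(2).decode()))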
150 @check("baz", "GNU Arch baz client")
150 @check("baz", "GNU Arch baz client")
151 def has_baz():
151 def has_baz():
152 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
152 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
153
153
154
154
155 @check("bzr", "Canonical's Bazaar client")
155 @check("bzr", "Canonical's Bazaar client")
156 def has_bzr():
156 def has_bzr():
157 try:
157 try:
158 import bzrlib
158 import bzrlib
159 import bzrlib.bzrdir
159 import bzrlib.bzrdir
160 import bzrlib.errors
160 import bzrlib.errors
161 import bzrlib.revision
161 import bzrlib.revision
162 import bzrlib.revisionspec
162 import bzrlib.revisionspec
163
163
164 bzrlib.revisionspec.RevisionSpec
164 bzrlib.revisionspec.RevisionSpec
165 return bzrlib.__doc__ is not None
165 return bzrlib.__doc__ is not None
166 except (AttributeError, ImportError):
166 except (AttributeError, ImportError):
167 return False
167 return False
168
168
169
169
170 @checkvers("bzr", "Canonical's Bazaar client >= %s", (1.14,))
170 @checkvers("bzr", "Canonical's Bazaar client >= %s", (1.14,))
171 def has_bzr_range(v):
171 def has_bzr_range(v):
172 major, minor = v.split('rc')[0].split('.')[0:2]
172 major, minor = v.split('rc')[0].split('.')[0:2]
173 try:
173 try:
174 import bzrlib
174 import bzrlib
175
175
176 return bzrlib.__doc__ is not None and bzrlib.version_info[:2] >= (
176 return bzrlib.__doc__ is not None and bzrlib.version_info[:2] >= (
177 int(major),
177 int(major),
178 int(minor),
178 int(minor),
179 )
179 )
180 except ImportError:
180 except ImportError:
181 return False
181 return False
182
182
183
183
184 @check("chg", "running with chg")
184 @check("chg", "running with chg")
185 def has_chg():
185 def has_chg():
186 return 'CHGHG' in os.environ
186 return 'CHGHG' in os.environ
187
187
188
188
189 @check("cvs", "cvs client/server")
189 @check("cvs", "cvs client/server")
190 def has_cvs():
190 def has_cvs():
191 re = br'Concurrent Versions System.*?server'
191 re = br'Concurrent Versions System.*?server'
192 return matchoutput('cvs --version 2>&1', re) and not has_msys()
192 return matchoutput('cvs --version 2>&1', re) and not has_msys()
193
193
194
194
195 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
195 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
196 def has_cvs112():
196 def has_cvs112():
197 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
197 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
198 return matchoutput('cvs --version 2>&1', re) and not has_msys()
198 return matchoutput('cvs --version 2>&1', re) and not has_msys()
199
199
200
200
201 @check("cvsnt", "cvsnt client/server")
201 @check("cvsnt", "cvsnt client/server")
202 def has_cvsnt():
202 def has_cvsnt():
203 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
203 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
204 return matchoutput('cvsnt --version 2>&1', re)
204 return matchoutput('cvsnt --version 2>&1', re)
205
205
206
206
207 @check("darcs", "darcs client")
207 @check("darcs", "darcs client")
208 def has_darcs():
208 def has_darcs():
209 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
209 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
210
210
211
211
212 @check("mtn", "monotone client (>= 1.0)")
212 @check("mtn", "monotone client (>= 1.0)")
213 def has_mtn():
213 def has_mtn():
214 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
214 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
215 'mtn --version', br'monotone 0\.', True
215 'mtn --version', br'monotone 0\.', True
216 )
216 )
217
217
218
218
219 @check("eol-in-paths", "end-of-lines in paths")
219 @check("eol-in-paths", "end-of-lines in paths")
220 def has_eol_in_paths():
220 def has_eol_in_paths():
221 try:
221 try:
222 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
222 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
223 os.close(fd)
223 os.close(fd)
224 os.remove(path)
224 os.remove(path)
225 return True
225 return True
226 except (IOError, OSError):
226 except (IOError, OSError):
227 return False
227 return False
228
228
229
229
230 @check("execbit", "executable bit")
230 @check("execbit", "executable bit")
231 def has_executablebit():
231 def has_executablebit():
232 try:
232 try:
233 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
233 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
234 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
234 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
235 try:
235 try:
236 os.close(fh)
236 os.close(fh)
237 m = os.stat(fn).st_mode & 0o777
237 m = os.stat(fn).st_mode & 0o777
238 new_file_has_exec = m & EXECFLAGS
238 new_file_has_exec = m & EXECFLAGS
239 os.chmod(fn, m ^ EXECFLAGS)
239 os.chmod(fn, m ^ EXECFLAGS)
240 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
240 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
241 finally:
241 finally:
242 os.unlink(fn)
242 os.unlink(fn)
243 except (IOError, OSError):
243 except (IOError, OSError):
244 # we don't care, the user probably won't be able to commit anyway
244 # we don't care, the user probably won't be able to commit anyway
245 return False
245 return False
246 return not (new_file_has_exec or exec_flags_cannot_flip)
246 return not (new_file_has_exec or exec_flags_cannot_flip)
247
247
248
248
249 @check("icasefs", "case insensitive file system")
249 @check("icasefs", "case insensitive file system")
250 def has_icasefs():
250 def has_icasefs():
251 # Stolen from mercurial.util
251 # Stolen from mercurial.util
252 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
252 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
253 os.close(fd)
253 os.close(fd)
254 try:
254 try:
255 s1 = os.stat(path)
255 s1 = os.stat(path)
256 d, b = os.path.split(path)
256 d, b = os.path.split(path)
257 p2 = os.path.join(d, b.upper())
257 p2 = os.path.join(d, b.upper())
258 if path == p2:
258 if path == p2:
259 p2 = os.path.join(d, b.lower())
259 p2 = os.path.join(d, b.lower())
260 try:
260 try:
261 s2 = os.stat(p2)
261 s2 = os.stat(p2)
262 return s2 == s1
262 return s2 == s1
263 except OSError:
263 except OSError:
264 return False
264 return False
265 finally:
265 finally:
266 os.remove(path)
266 os.remove(path)
267
267
268
268
269 @check("fifo", "named pipes")
269 @check("fifo", "named pipes")
270 def has_fifo():
270 def has_fifo():
271 if getattr(os, "mkfifo", None) is None:
271 if getattr(os, "mkfifo", None) is None:
272 return False
272 return False
273 name = tempfile.mktemp(dir='.', prefix=tempprefix)
273 name = tempfile.mktemp(dir='.', prefix=tempprefix)
274 try:
274 try:
275 os.mkfifo(name)
275 os.mkfifo(name)
276 os.unlink(name)
276 os.unlink(name)
277 return True
277 return True
278 except OSError:
278 except OSError:
279 return False
279 return False
280
280
281
281
282 @check("killdaemons", 'killdaemons.py support')
282 @check("killdaemons", 'killdaemons.py support')
283 def has_killdaemons():
283 def has_killdaemons():
284 return True
284 return True
285
285
286
286
287 @check("cacheable", "cacheable filesystem")
287 @check("cacheable", "cacheable filesystem")
288 def has_cacheable_fs():
288 def has_cacheable_fs():
289 from mercurial import util
289 from mercurial import util
290
290
291 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
291 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
292 os.close(fd)
292 os.close(fd)
293 try:
293 try:
294 return util.cachestat(path).cacheable()
294 return util.cachestat(path).cacheable()
295 finally:
295 finally:
296 os.remove(path)
296 os.remove(path)
297
297
298
298
299 @check("lsprof", "python lsprof module")
299 @check("lsprof", "python lsprof module")
300 def has_lsprof():
300 def has_lsprof():
301 try:
301 try:
302 import _lsprof
302 import _lsprof
303
303
304 _lsprof.Profiler # silence unused import warning
304 _lsprof.Profiler # silence unused import warning
305 return True
305 return True
306 except ImportError:
306 except ImportError:
307 return False
307 return False
308
308
309
309
310 def _gethgversion():
310 def _gethgversion():
311 m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
311 m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
312 if not m:
312 if not m:
313 return (0, 0)
313 return (0, 0)
314 return (int(m.group(1)), int(m.group(2)))
314 return (int(m.group(1)), int(m.group(2)))
315
315
316
316
317 _hgversion = None
317 _hgversion = None
318
318
319
319
320 def gethgversion():
320 def gethgversion():
321 global _hgversion
321 global _hgversion
322 if _hgversion is None:
322 if _hgversion is None:
323 _hgversion = _gethgversion()
323 _hgversion = _gethgversion()
324 return _hgversion
324 return _hgversion
325
325
326
326
327 @checkvers(
327 @checkvers(
328 "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
328 "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
329 )
329 )
330 def has_hg_range(v):
330 def has_hg_range(v):
331 major, minor = v.split('.')[0:2]
331 major, minor = v.split('.')[0:2]
332 return gethgversion() >= (int(major), int(minor))
332 return gethgversion() >= (int(major), int(minor))
333
333
334
334
335 @check("rust", "Using the Rust extensions")
335 @check("rust", "Using the Rust extensions")
336 def has_rust():
336 def has_rust():
337 """Check is the mercurial currently running is using some rust code"""
337 """Check is the mercurial currently running is using some rust code"""
338 cmd = 'hg debuginstall --quiet 2>&1'
338 cmd = 'hg debuginstall --quiet 2>&1'
339 match = br'checking module policy \(([^)]+)\)'
339 match = br'checking module policy \(([^)]+)\)'
340 policy = matchoutput(cmd, match)
340 policy = matchoutput(cmd, match)
341 if not policy:
341 if not policy:
342 return False
342 return False
343 return b'rust' in policy.group(1)
343 return b'rust' in policy.group(1)
344
344
345
345
346 @check("hg08", "Mercurial >= 0.8")
346 @check("hg08", "Mercurial >= 0.8")
347 def has_hg08():
347 def has_hg08():
348 if checks["hg09"][0]():
348 if checks["hg09"][0]():
349 return True
349 return True
350 return matchoutput('hg help annotate 2>&1', '--date')
350 return matchoutput('hg help annotate 2>&1', '--date')
351
351
352
352
353 @check("hg07", "Mercurial >= 0.7")
353 @check("hg07", "Mercurial >= 0.7")
354 def has_hg07():
354 def has_hg07():
355 if checks["hg08"][0]():
355 if checks["hg08"][0]():
356 return True
356 return True
357 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
357 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
358
358
359
359
360 @check("hg06", "Mercurial >= 0.6")
360 @check("hg06", "Mercurial >= 0.6")
361 def has_hg06():
361 def has_hg06():
362 if checks["hg07"][0]():
362 if checks["hg07"][0]():
363 return True
363 return True
364 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
364 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
365
365
366
366
367 @check("gettext", "GNU Gettext (msgfmt)")
367 @check("gettext", "GNU Gettext (msgfmt)")
368 def has_gettext():
368 def has_gettext():
369 return matchoutput('msgfmt --version', br'GNU gettext-tools')
369 return matchoutput('msgfmt --version', br'GNU gettext-tools')
370
370
371
371
372 @check("git", "git command line client")
372 @check("git", "git command line client")
373 def has_git():
373 def has_git():
374 return matchoutput('git --version 2>&1', br'^git version')
374 return matchoutput('git --version 2>&1', br'^git version')
375
375
376
376
377 def getgitversion():
377 def getgitversion():
378 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
378 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
379 if not m:
379 if not m:
380 return (0, 0)
380 return (0, 0)
381 return (int(m.group(1)), int(m.group(2)))
381 return (int(m.group(1)), int(m.group(2)))
382
382
383
383
384 @check("pygit2", "pygit2 Python library")
384 @check("pygit2", "pygit2 Python library")
385 def has_pygit2():
385 def has_pygit2():
386 try:
386 try:
387 import pygit2
387 import pygit2
388
388
389 pygit2.Oid # silence unused import
389 pygit2.Oid # silence unused import
390 return True
390 return True
391 except ImportError:
391 except ImportError:
392 return False
392 return False
393
393
394
394
395 # https://github.com/git-lfs/lfs-test-server
395 # https://github.com/git-lfs/lfs-test-server
396 @check("lfs-test-server", "git-lfs test server")
396 @check("lfs-test-server", "git-lfs test server")
397 def has_lfsserver():
397 def has_lfsserver():
398 exe = 'lfs-test-server'
398 exe = 'lfs-test-server'
399 if has_windows():
399 if has_windows():
400 exe = 'lfs-test-server.exe'
400 exe = 'lfs-test-server.exe'
401 return any(
401 return any(
402 os.access(os.path.join(path, exe), os.X_OK)
402 os.access(os.path.join(path, exe), os.X_OK)
403 for path in os.environ["PATH"].split(os.pathsep)
403 for path in os.environ["PATH"].split(os.pathsep)
404 )
404 )
405
405
406
406
407 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
407 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
408 def has_git_range(v):
408 def has_git_range(v):
409 major, minor = v.split('.')[0:2]
409 major, minor = v.split('.')[0:2]
410 return getgitversion() >= (int(major), int(minor))
410 return getgitversion() >= (int(major), int(minor))
411
411
412
412
413 @check("docutils", "Docutils text processing library")
413 @check("docutils", "Docutils text processing library")
414 def has_docutils():
414 def has_docutils():
415 try:
415 try:
416 import docutils.core
416 import docutils.core
417
417
418 docutils.core.publish_cmdline # silence unused import
418 docutils.core.publish_cmdline # silence unused import
419 return True
419 return True
420 except ImportError:
420 except ImportError:
421 return False
421 return False
422
422
423
423
424 def getsvnversion():
424 def getsvnversion():
425 m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
425 m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
426 if not m:
426 if not m:
427 return (0, 0)
427 return (0, 0)
428 return (int(m.group(1)), int(m.group(2)))
428 return (int(m.group(1)), int(m.group(2)))
429
429
430
430
431 @checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
431 @checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
432 def has_svn_range(v):
432 def has_svn_range(v):
433 major, minor = v.split('.')[0:2]
433 major, minor = v.split('.')[0:2]
434 return getsvnversion() >= (int(major), int(minor))
434 return getsvnversion() >= (int(major), int(minor))
435
435
436
436
437 @check("svn", "subversion client and admin tools")
437 @check("svn", "subversion client and admin tools")
438 def has_svn():
438 def has_svn():
439 return matchoutput('svn --version 2>&1', br'^svn, version') and matchoutput(
439 return matchoutput('svn --version 2>&1', br'^svn, version') and matchoutput(
440 'svnadmin --version 2>&1', br'^svnadmin, version'
440 'svnadmin --version 2>&1', br'^svnadmin, version'
441 )
441 )
442
442
443
443
444 @check("svn-bindings", "subversion python bindings")
444 @check("svn-bindings", "subversion python bindings")
445 def has_svn_bindings():
445 def has_svn_bindings():
446 try:
446 try:
447 import svn.core
447 import svn.core
448
448
449 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
449 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
450 if version < (1, 4):
450 if version < (1, 4):
451 return False
451 return False
452 return True
452 return True
453 except ImportError:
453 except ImportError:
454 return False
454 return False
455
455
456
456
457 @check("p4", "Perforce server and client")
457 @check("p4", "Perforce server and client")
458 def has_p4():
458 def has_p4():
459 return matchoutput('p4 -V', br'Rev\. P4/') and matchoutput(
459 return matchoutput('p4 -V', br'Rev\. P4/') and matchoutput(
460 'p4d -V', br'Rev\. P4D/'
460 'p4d -V', br'Rev\. P4D/'
461 )
461 )
462
462
463
463
464 @check("symlink", "symbolic links")
464 @check("symlink", "symbolic links")
465 def has_symlink():
465 def has_symlink():
466 # mercurial.windows.checklink() is a hard 'no' at the moment
466 # mercurial.windows.checklink() is a hard 'no' at the moment
467 if os.name == 'nt' or getattr(os, "symlink", None) is None:
467 if os.name == 'nt' or getattr(os, "symlink", None) is None:
468 return False
468 return False
469 name = tempfile.mktemp(dir='.', prefix=tempprefix)
469 name = tempfile.mktemp(dir='.', prefix=tempprefix)
470 try:
470 try:
471 os.symlink(".", name)
471 os.symlink(".", name)
472 os.unlink(name)
472 os.unlink(name)
473 return True
473 return True
474 except (OSError, AttributeError):
474 except (OSError, AttributeError):
475 return False
475 return False
476
476
477
477
478 @check("hardlink", "hardlinks")
478 @check("hardlink", "hardlinks")
479 def has_hardlink():
479 def has_hardlink():
480 from mercurial import util
480 from mercurial import util
481
481
482 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
482 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
483 os.close(fh)
483 os.close(fh)
484 name = tempfile.mktemp(dir='.', prefix=tempprefix)
484 name = tempfile.mktemp(dir='.', prefix=tempprefix)
485 try:
485 try:
486 util.oslink(_sys2bytes(fn), _sys2bytes(name))
486 util.oslink(_sys2bytes(fn), _sys2bytes(name))
487 os.unlink(name)
487 os.unlink(name)
488 return True
488 return True
489 except OSError:
489 except OSError:
490 return False
490 return False
491 finally:
491 finally:
492 os.unlink(fn)
492 os.unlink(fn)
493
493
494
494
495 @check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
495 @check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
496 def has_hardlink_whitelisted():
496 def has_hardlink_whitelisted():
497 from mercurial import util
497 from mercurial import util
498
498
499 try:
499 try:
500 fstype = util.getfstype(b'.')
500 fstype = util.getfstype(b'.')
501 except OSError:
501 except OSError:
502 return False
502 return False
503 return fstype in util._hardlinkfswhitelist
503 return fstype in util._hardlinkfswhitelist
504
504
505
505
506 @check("rmcwd", "can remove current working directory")
506 @check("rmcwd", "can remove current working directory")
507 def has_rmcwd():
507 def has_rmcwd():
508 ocwd = os.getcwd()
508 ocwd = os.getcwd()
509 temp = tempfile.mkdtemp(dir='.', prefix=tempprefix)
509 temp = tempfile.mkdtemp(dir='.', prefix=tempprefix)
510 try:
510 try:
511 os.chdir(temp)
511 os.chdir(temp)
512 # On Linux, 'rmdir .' isn't allowed, but the other names are okay.
512 # On Linux, 'rmdir .' isn't allowed, but the other names are okay.
513 # On Solaris and Windows, the cwd can't be removed by any names.
513 # On Solaris and Windows, the cwd can't be removed by any names.
514 os.rmdir(os.getcwd())
514 os.rmdir(os.getcwd())
515 return True
515 return True
516 except OSError:
516 except OSError:
517 return False
517 return False
518 finally:
518 finally:
519 os.chdir(ocwd)
519 os.chdir(ocwd)
520 # clean up temp dir on platforms where cwd can't be removed
520 # clean up temp dir on platforms where cwd can't be removed
521 try:
521 try:
522 os.rmdir(temp)
522 os.rmdir(temp)
523 except OSError:
523 except OSError:
524 pass
524 pass
525
525
526
526
527 @check("tla", "GNU Arch tla client")
527 @check("tla", "GNU Arch tla client")
528 def has_tla():
528 def has_tla():
529 return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
529 return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
530
530
531
531
532 @check("gpg", "gpg client")
532 @check("gpg", "gpg client")
533 def has_gpg():
533 def has_gpg():
534 return matchoutput('gpg --version 2>&1', br'GnuPG')
534 return matchoutput('gpg --version 2>&1', br'GnuPG')
535
535
536
536
537 @check("gpg2", "gpg client v2")
537 @check("gpg2", "gpg client v2")
538 def has_gpg2():
538 def has_gpg2():
539 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.')
539 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.')
540
540
541
541
542 @check("gpg21", "gpg client v2.1+")
542 @check("gpg21", "gpg client v2.1+")
543 def has_gpg21():
543 def has_gpg21():
544 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.(?!0)')
544 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.(?!0)')
545
545
546
546
547 @check("unix-permissions", "unix-style permissions")
547 @check("unix-permissions", "unix-style permissions")
548 def has_unix_permissions():
548 def has_unix_permissions():
549 d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
549 d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
550 try:
550 try:
551 fname = os.path.join(d, 'foo')
551 fname = os.path.join(d, 'foo')
552 for umask in (0o77, 0o07, 0o22):
552 for umask in (0o77, 0o07, 0o22):
553 os.umask(umask)
553 os.umask(umask)
554 f = open(fname, 'w')
554 f = open(fname, 'w')
555 f.close()
555 f.close()
556 mode = os.stat(fname).st_mode
556 mode = os.stat(fname).st_mode
557 os.unlink(fname)
557 os.unlink(fname)
558 if mode & 0o777 != ~umask & 0o666:
558 if mode & 0o777 != ~umask & 0o666:
559 return False
559 return False
560 return True
560 return True
561 finally:
561 finally:
562 os.rmdir(d)
562 os.rmdir(d)
563
563
564
564
565 @check("unix-socket", "AF_UNIX socket family")
565 @check("unix-socket", "AF_UNIX socket family")
566 def has_unix_socket():
566 def has_unix_socket():
567 return getattr(socket, 'AF_UNIX', None) is not None
567 return getattr(socket, 'AF_UNIX', None) is not None
568
568
569
569
570 @check("root", "root permissions")
570 @check("root", "root permissions")
571 def has_root():
571 def has_root():
572 return getattr(os, 'geteuid', None) and os.geteuid() == 0
572 return getattr(os, 'geteuid', None) and os.geteuid() == 0
573
573
574
574
575 @check("pyflakes", "Pyflakes python linter")
575 @check("pyflakes", "Pyflakes python linter")
576 def has_pyflakes():
576 def has_pyflakes():
577 try:
577 try:
578 import pyflakes
578 import pyflakes
579
579
580 pyflakes.__version__
580 pyflakes.__version__
581 except ImportError:
581 except ImportError:
582 return False
582 return False
583 else:
583 else:
584 return True
584 return True
585
585
586
586
587 @check("pylint", "Pylint python linter")
587 @check("pylint", "Pylint python linter")
588 def has_pylint():
588 def has_pylint():
589 return matchoutput("pylint --help", br"Usage: pylint", True)
589 return matchoutput("pylint --help", br"Usage: pylint", True)
590
590
591
591
592 @check("clang-format", "clang-format C code formatter")
592 @check("clang-format", "clang-format C code formatter")
593 def has_clang_format():
593 def has_clang_format():
594 m = matchoutput('clang-format --version', br'clang-format version (\d)')
594 m = matchoutput('clang-format --version', br'clang-format version (\d)')
595 # style changed somewhere between 4.x and 6.x
595 # style changed somewhere between 4.x and 6.x
596 return m and int(m.group(1)) >= 6
596 return m and int(m.group(1)) >= 6
597
597
598
598
599 @check("jshint", "JSHint static code analysis tool")
599 @check("jshint", "JSHint static code analysis tool")
600 def has_jshint():
600 def has_jshint():
601 return matchoutput("jshint --version 2>&1", br"jshint v")
601 return matchoutput("jshint --version 2>&1", br"jshint v")
602
602
603
603
604 @check("pygments", "Pygments source highlighting library")
604 @check("pygments", "Pygments source highlighting library")
605 def has_pygments():
605 def has_pygments():
606 try:
606 try:
607 import pygments
607 import pygments
608
608
609 pygments.highlight # silence unused import warning
609 pygments.highlight # silence unused import warning
610 return True
610 return True
611 except ImportError:
611 except ImportError:
612 return False
612 return False
613
613
614
614
615 @check("pygments25", "Pygments version >= 2.5")
615 @check("pygments25", "Pygments version >= 2.5")
616 def pygments25():
616 def pygments25():
617 try:
617 try:
618 import pygments
618 import pygments
619
619
620 v = pygments.__version__
620 v = pygments.__version__
621 except ImportError:
621 except ImportError:
622 return False
622 return False
623
623
624 parts = v.split(".")
624 parts = v.split(".")
625 major = int(parts[0])
625 major = int(parts[0])
626 minor = int(parts[1])
626 minor = int(parts[1])
627
627
628 return (major, minor) >= (2, 5)
628 return (major, minor) >= (2, 5)
629
629
630
630
631 @check("outer-repo", "outer repo")
631 @check("outer-repo", "outer repo")
632 def has_outer_repo():
632 def has_outer_repo():
633 # failing for other reasons than 'no repo' imply that there is a repo
633 # failing for other reasons than 'no repo' imply that there is a repo
634 return not matchoutput('hg root 2>&1', br'abort: no repository found', True)
634 return not matchoutput('hg root 2>&1', br'abort: no repository found', True)
635
635
636
636
637 @check("ssl", "ssl module available")
637 @check("ssl", "ssl module available")
638 def has_ssl():
638 def has_ssl():
639 try:
639 try:
640 import ssl
640 import ssl
641
641
642 ssl.CERT_NONE
642 ssl.CERT_NONE
643 return True
643 return True
644 except ImportError:
644 except ImportError:
645 return False
645 return False
646
646
647
647
648 @check("defaultcacertsloaded", "detected presence of loaded system CA certs")
648 @check("defaultcacertsloaded", "detected presence of loaded system CA certs")
649 def has_defaultcacertsloaded():
649 def has_defaultcacertsloaded():
650 import ssl
650 import ssl
651 from mercurial import sslutil, ui as uimod
651 from mercurial import sslutil, ui as uimod
652
652
653 ui = uimod.ui.load()
653 ui = uimod.ui.load()
654 cafile = sslutil._defaultcacerts(ui)
654 cafile = sslutil._defaultcacerts(ui)
655 ctx = ssl.create_default_context()
655 ctx = ssl.create_default_context()
656 if cafile:
656 if cafile:
657 ctx.load_verify_locations(cafile=cafile)
657 ctx.load_verify_locations(cafile=cafile)
658 else:
658 else:
659 ctx.load_default_certs()
659 ctx.load_default_certs()
660
660
661 return len(ctx.get_ca_certs()) > 0
661 return len(ctx.get_ca_certs()) > 0
662
662
663
663
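# Illustrative sketch (not part of hghave.py): the same probe without
# Mercurial's ui/sslutil plumbing -- the stdlib ssl module alone can report
# how many system CA certificates a default context picked up.
def _example_count_system_cas():
    import ssl

    ctx = ssl.create_default_context()
    ctx.load_default_certs()
    return len(ctx.get_ca_certs())

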
664 @check("tls1.2", "TLS 1.2 protocol support")
664 @check("tls1.2", "TLS 1.2 protocol support")
665 def has_tls1_2():
665 def has_tls1_2():
666 from mercurial import sslutil
666 from mercurial import sslutil
667
667
668 return b'tls1.2' in sslutil.supportedprotocols
668 return b'tls1.2' in sslutil.supportedprotocols
669
669
670
670
671 @check("windows", "Windows")
671 @check("windows", "Windows")
672 def has_windows():
672 def has_windows():
673 return os.name == 'nt'
673 return os.name == 'nt'
674
674
675
675
676 @check("system-sh", "system() uses sh")
676 @check("system-sh", "system() uses sh")
677 def has_system_sh():
677 def has_system_sh():
678 return os.name != 'nt'
678 return os.name != 'nt'
679
679
680
680
681 @check("serve", "platform and python can manage 'hg serve -d'")
681 @check("serve", "platform and python can manage 'hg serve -d'")
682 def has_serve():
682 def has_serve():
683 return True
683 return True
684
684
685
685
686 @check("setprocname", "whether osutil.setprocname is available or not")
686 @check("setprocname", "whether osutil.setprocname is available or not")
687 def has_setprocname():
687 def has_setprocname():
688 try:
688 try:
689 from mercurial.utils import procutil
689 from mercurial.utils import procutil
690
690
691 procutil.setprocname
691 procutil.setprocname
692 return True
692 return True
693 except AttributeError:
693 except AttributeError:
694 return False
694 return False
695
695
696
696
697 @check("test-repo", "running tests from repository")
697 @check("test-repo", "running tests from repository")
698 def has_test_repo():
698 def has_test_repo():
699 t = os.environ["TESTDIR"]
699 t = os.environ["TESTDIR"]
700 return os.path.isdir(os.path.join(t, "..", ".hg"))
700 return os.path.isdir(os.path.join(t, "..", ".hg"))
701
701
702
702
703 @check("tic", "terminfo compiler and curses module")
703 @check("tic", "terminfo compiler and curses module")
704 def has_tic():
704 def has_tic():
705 try:
705 try:
706 import curses
706 import curses
707
707
708 curses.COLOR_BLUE
708 curses.COLOR_BLUE
709 return matchoutput('test -x "`which tic`"', br'')
709 return matchoutput('test -x "`which tic`"', br'')
710 except (ImportError, AttributeError):
710 except (ImportError, AttributeError):
711 return False
711 return False
712
712
713
713
714 @check("xz", "xz compression utility")
714 @check("xz", "xz compression utility")
715 def has_xz():
715 def has_xz():
716 # When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
716 # When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
717 # only knows `where`, not `which`. So invoke MSYS shell explicitly.
717 # only knows `where`, not `which`. So invoke MSYS shell explicitly.
718 return matchoutput("sh -c 'test -x \"`which xz`\"'", b'')
718 return matchoutput("sh -c 'test -x \"`which xz`\"'", b'')
719
719
720
720
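# Illustrative sketch (not part of hghave.py): when a shell round-trip is not
# required, shutil.which() (Python 3.3+) answers the same "is xz on PATH and
# executable?" question portably on both Windows and POSIX.
def _example_has_executable(name='xz'):
    import shutil

    return shutil.which(name) is not None

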
721 @check("msys", "Windows with MSYS")
721 @check("msys", "Windows with MSYS")
722 def has_msys():
722 def has_msys():
723 return os.getenv('MSYSTEM')
723 return os.getenv('MSYSTEM')
724
724
725
725
726 @check("aix", "AIX")
726 @check("aix", "AIX")
727 def has_aix():
727 def has_aix():
728 return sys.platform.startswith("aix")
728 return sys.platform.startswith("aix")
729
729
730
730
731 @check("osx", "OS X")
731 @check("osx", "OS X")
732 def has_osx():
732 def has_osx():
733 return sys.platform == 'darwin'
733 return sys.platform == 'darwin'
734
734
735
735
736 @check("osxpackaging", "OS X packaging tools")
736 @check("osxpackaging", "OS X packaging tools")
737 def has_osxpackaging():
737 def has_osxpackaging():
738 try:
738 try:
739 return (
739 return (
740 matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
740 matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
741 and matchoutput(
741 and matchoutput(
742 'productbuild', br'Usage: productbuild ', ignorestatus=1
742 'productbuild', br'Usage: productbuild ', ignorestatus=1
743 )
743 )
744 and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
744 and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
745 and matchoutput('xar --help', br'Usage: xar', ignorestatus=1)
745 and matchoutput('xar --help', br'Usage: xar', ignorestatus=1)
746 )
746 )
747 except ImportError:
747 except ImportError:
748 return False
748 return False
749
749
750
750
751 @check('linuxormacos', 'Linux or MacOS')
751 @check('linuxormacos', 'Linux or MacOS')
752 def has_linuxormacos():
752 def has_linuxormacos():
753 # This isn't a perfect test for MacOS. But it is sufficient for our needs.
753 # This isn't a perfect test for MacOS. But it is sufficient for our needs.
754 return sys.platform.startswith(('linux', 'darwin'))
754 return sys.platform.startswith(('linux', 'darwin'))
755
755
756
756
757 @check("docker", "docker support")
757 @check("docker", "docker support")
758 def has_docker():
758 def has_docker():
759 pat = br'A self-sufficient runtime for'
759 pat = br'A self-sufficient runtime for'
760 if matchoutput('docker --help', pat):
760 if matchoutput('docker --help', pat):
761 if 'linux' not in sys.platform:
761 if 'linux' not in sys.platform:
762 # TODO: in theory we should be able to test docker-based
762 # TODO: in theory we should be able to test docker-based
763 # package creation on non-linux using boot2docker, but in
763 # package creation on non-linux using boot2docker, but in
764 # practice that requires extra coordination to make sure
764 # practice that requires extra coordination to make sure
765 # $TESTTEMP is going to be visible at the same path to the
765 # $TESTTEMP is going to be visible at the same path to the
766 # boot2docker VM. If we figure out how to verify that, we
766 # boot2docker VM. If we figure out how to verify that, we
767 # can use the following instead of just saying False:
767 # can use the following instead of just saying False:
768 # return 'DOCKER_HOST' in os.environ
768 # return 'DOCKER_HOST' in os.environ
769 return False
769 return False
770
770
771 return True
771 return True
772 return False
772 return False
773
773
774
774
775 @check("debhelper", "debian packaging tools")
775 @check("debhelper", "debian packaging tools")
776 def has_debhelper():
776 def has_debhelper():
777 # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
777 # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
778 # quote), so just accept anything in that spot.
778 # quote), so just accept anything in that spot.
779 dpkg = matchoutput(
779 dpkg = matchoutput(
780 'dpkg --version', br"Debian .dpkg' package management program"
780 'dpkg --version', br"Debian .dpkg' package management program"
781 )
781 )
782 dh = matchoutput(
782 dh = matchoutput(
783 'dh --help', br'dh is a part of debhelper.', ignorestatus=True
783 'dh --help', br'dh is a part of debhelper.', ignorestatus=True
784 )
784 )
785 dh_py2 = matchoutput(
785 dh_py2 = matchoutput(
786 'dh_python2 --help', br'other supported Python versions'
786 'dh_python2 --help', br'other supported Python versions'
787 )
787 )
788 # debuild comes from the 'devscripts' package, though you might want
788 # debuild comes from the 'devscripts' package, though you might want
789 # the 'build-debs' package instead, which has a dependency on devscripts.
789 # the 'build-debs' package instead, which has a dependency on devscripts.
790 debuild = matchoutput(
790 debuild = matchoutput(
791 'debuild --help', br'to run debian/rules with given parameter'
791 'debuild --help', br'to run debian/rules with given parameter'
792 )
792 )
793 return dpkg and dh and dh_py2 and debuild
793 return dpkg and dh and dh_py2 and debuild
794
794
795
795
796 @check(
796 @check(
797 "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
797 "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
798 )
798 )
799 def has_debdeps():
799 def has_debdeps():
800 # just check exit status (ignoring output)
800 # just check exit status (ignoring output)
801 path = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
801 path = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
802 return matchoutput('dpkg-checkbuilddeps %s' % path, br'')
802 return matchoutput('dpkg-checkbuilddeps %s' % path, br'')
803
803
804
804
805 @check("demandimport", "demandimport enabled")
805 @check("demandimport", "demandimport enabled")
806 def has_demandimport():
806 def has_demandimport():
807 # chg disables demandimport intentionally for performance wins.
807 # chg disables demandimport intentionally for performance wins.
808 return (not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable'
808 return (not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable'
809
809
810
810
811 # Add "py27", "py35", ... as possible feature checks. Note that there's no
811 # Add "py27", "py35", ... as possible feature checks. Note that there's no
812 # punctuation here.
812 # punctuation here.
813 @checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
813 @checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
814 def has_python_range(v):
814 def has_python_range(v):
815 major, minor = v.split('.')[0:2]
815 major, minor = v.split('.')[0:2]
816 py_major, py_minor = sys.version_info.major, sys.version_info.minor
816 py_major, py_minor = sys.version_info.major, sys.version_info.minor
817
817
818 return (py_major, py_minor) >= (int(major), int(minor))
818 return (py_major, py_minor) >= (int(major), int(minor))
819
819
820
820
821 @check("py3", "running with Python 3.x")
821 @check("py3", "running with Python 3.x")
822 def has_py3():
822 def has_py3():
823 return 3 == sys.version_info[0]
823 return 3 == sys.version_info[0]
824
824
825
825
826 @check("py3exe", "a Python 3.x interpreter is available")
826 @check("py3exe", "a Python 3.x interpreter is available")
827 def has_python3exe():
827 def has_python3exe():
828 return matchoutput('python3 -V', br'^Python 3.(5|6|7|8|9)')
828 return matchoutput('python3 -V', br'^Python 3.(5|6|7|8|9)')
829
829
830
830
831 @check("pure", "running with pure Python code")
831 @check("pure", "running with pure Python code")
832 def has_pure():
832 def has_pure():
833 return any(
833 return any(
834 [
834 [
835 os.environ.get("HGMODULEPOLICY") == "py",
835 os.environ.get("HGMODULEPOLICY") == "py",
836 os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
836 os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
837 ]
837 ]
838 )
838 )
839
839
840
840
841 @check("slow", "allow slow tests (use --allow-slow-tests)")
841 @check("slow", "allow slow tests (use --allow-slow-tests)")
842 def has_slow():
842 def has_slow():
843 return os.environ.get('HGTEST_SLOW') == 'slow'
843 return os.environ.get('HGTEST_SLOW') == 'slow'
844
844
845
845
846 @check("hypothesis", "Hypothesis automated test generation")
846 @check("hypothesis", "Hypothesis automated test generation")
847 def has_hypothesis():
847 def has_hypothesis():
848 try:
848 try:
849 import hypothesis
849 import hypothesis
850
850
851 hypothesis.given
851 hypothesis.given
852 return True
852 return True
853 except ImportError:
853 except ImportError:
854 return False
854 return False
855
855
856
856
857 @check("unziplinks", "unzip(1) understands and extracts symlinks")
857 @check("unziplinks", "unzip(1) understands and extracts symlinks")
858 def unzip_understands_symlinks():
858 def unzip_understands_symlinks():
859 return matchoutput('unzip --help', br'Info-ZIP')
859 return matchoutput('unzip --help', br'Info-ZIP')
860
860
861
861
862 @check("zstd", "zstd Python module available")
862 @check("zstd", "zstd Python module available")
863 def has_zstd():
863 def has_zstd():
864 try:
864 try:
865 import mercurial.zstd
865 import mercurial.zstd
866
866
867 mercurial.zstd.__version__
867 mercurial.zstd.__version__
868 return True
868 return True
869 except ImportError:
869 except ImportError:
870 return False
870 return False
871
871
872
872
873 @check("devfull", "/dev/full special file")
873 @check("devfull", "/dev/full special file")
874 def has_dev_full():
874 def has_dev_full():
875 return os.path.exists('/dev/full')
875 return os.path.exists('/dev/full')
876
876
877
877
878 @check("ensurepip", "ensurepip module")
878 @check("ensurepip", "ensurepip module")
879 def has_ensurepip():
879 def has_ensurepip():
880 try:
880 try:
881 import ensurepip
881 import ensurepip
882
882
883 ensurepip.bootstrap
883 ensurepip.bootstrap
884 return True
884 return True
885 except ImportError:
885 except ImportError:
886 return False
886 return False
887
887
888
888
889 @check("virtualenv", "Python virtualenv support")
889 @check("virtualenv", "Python virtualenv support")
890 def has_virtualenv():
890 def has_virtualenv():
891 try:
891 try:
892 import virtualenv
892 import virtualenv
893
893
894 virtualenv.ACTIVATE_SH
894 virtualenv.ACTIVATE_SH
895 return True
895 return True
896 except ImportError:
896 except ImportError:
897 return False
897 return False
898
898
899
899
900 @check("fsmonitor", "running tests with fsmonitor")
900 @check("fsmonitor", "running tests with fsmonitor")
901 def has_fsmonitor():
901 def has_fsmonitor():
902 return 'HGFSMONITOR_TESTS' in os.environ
902 return 'HGFSMONITOR_TESTS' in os.environ
903
903
904
904
905 @check("fuzzywuzzy", "Fuzzy string matching library")
905 @check("fuzzywuzzy", "Fuzzy string matching library")
906 def has_fuzzywuzzy():
906 def has_fuzzywuzzy():
907 try:
907 try:
908 import fuzzywuzzy
908 import fuzzywuzzy
909
909
910 fuzzywuzzy.__version__
910 fuzzywuzzy.__version__
911 return True
911 return True
912 except ImportError:
912 except ImportError:
913 return False
913 return False
914
914
915
915
916 @check("clang-libfuzzer", "clang new enough to include libfuzzer")
916 @check("clang-libfuzzer", "clang new enough to include libfuzzer")
917 def has_clang_libfuzzer():
917 def has_clang_libfuzzer():
918 mat = matchoutput('clang --version', br'clang version (\d)')
918 mat = matchoutput('clang --version', br'clang version (\d)')
919 if mat:
919 if mat:
920 # libfuzzer is new in clang 6
920 # libfuzzer is new in clang 6
921 return int(mat.group(1)) > 5
921 return int(mat.group(1)) > 5
922 return False
922 return False
923
923
924
924
925 @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
925 @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
926 def has_clang60():
926 def has_clang60():
927 return matchoutput('clang-6.0 --version', br'clang version 6\.')
927 return matchoutput('clang-6.0 --version', br'clang version 6\.')
928
928
929
929
930 @check("xdiff", "xdiff algorithm")
930 @check("xdiff", "xdiff algorithm")
931 def has_xdiff():
931 def has_xdiff():
932 try:
932 try:
933 from mercurial import policy
933 from mercurial import policy
934
934
935 bdiff = policy.importmod('bdiff')
935 bdiff = policy.importmod('bdiff')
936 return bdiff.xdiffblocks(b'', b'') == [(0, 0, 0, 0)]
936 return bdiff.xdiffblocks(b'', b'') == [(0, 0, 0, 0)]
937 except (ImportError, AttributeError):
937 except (ImportError, AttributeError):
938 return False
938 return False
939
939
940
940
941 @check('extraextensions', 'whether tests are running with extra extensions')
941 @check('extraextensions', 'whether tests are running with extra extensions')
942 def has_extraextensions():
942 def has_extraextensions():
943 return 'HGTESTEXTRAEXTENSIONS' in os.environ
943 return 'HGTESTEXTRAEXTENSIONS' in os.environ
944
944
945
945
946 def getrepofeatures():
946 def getrepofeatures():
947 """Obtain set of repository features in use.
947 """Obtain set of repository features in use.
948
948
949 HGREPOFEATURES can be used to define or remove features. It contains
949 HGREPOFEATURES can be used to define or remove features. It contains
950 a space-delimited list of feature strings. Strings beginning with ``-``
950 a space-delimited list of feature strings. Strings beginning with ``-``
951 mean to remove.
951 mean to remove.
952 """
952 """
953 # Default list provided by core.
953 # Default list provided by core.
954 features = {
954 features = {
955 'bundlerepo',
955 'bundlerepo',
956 'revlogstore',
956 'revlogstore',
957 'fncache',
957 'fncache',
958 }
958 }
959
959
960 # Features that imply other features.
960 # Features that imply other features.
961 implies = {
961 implies = {
962 'simplestore': ['-revlogstore', '-bundlerepo', '-fncache'],
962 'simplestore': ['-revlogstore', '-bundlerepo', '-fncache'],
963 }
963 }
964
964
965 for override in os.environ.get('HGREPOFEATURES', '').split(' '):
965 for override in os.environ.get('HGREPOFEATURES', '').split(' '):
966 if not override:
966 if not override:
967 continue
967 continue
968
968
969 if override.startswith('-'):
969 if override.startswith('-'):
970 if override[1:] in features:
970 if override[1:] in features:
971 features.remove(override[1:])
971 features.remove(override[1:])
972 else:
972 else:
973 features.add(override)
973 features.add(override)
974
974
975 for imply in implies.get(override, []):
975 for imply in implies.get(override, []):
976 if imply.startswith('-'):
976 if imply.startswith('-'):
977 if imply[1:] in features:
977 if imply[1:] in features:
978 features.remove(imply[1:])
978 features.remove(imply[1:])
979 else:
979 else:
980 features.add(imply)
980 features.add(imply)
981
981
982 return features
982 return features
983
983
984
984
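# Illustrative sketch (not part of hghave.py): with the rules above,
# HGREPOFEATURES='simplestore' adds 'simplestore' and, through the implies
# table, drops 'revlogstore', 'bundlerepo' and 'fncache', leaving only
# {'simplestore'}.  The sketch mutates os.environ, which is acceptable in a
# throwaway test process but is not something hghave itself does here.
def _example_repofeatures():
    import os

    os.environ['HGREPOFEATURES'] = 'simplestore'
    return getrepofeatures()  # -> {'simplestore'}

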
@check('reporevlogstore', 'repository using the default revlog store')
def has_reporevlogstore():
    return 'revlogstore' in getrepofeatures()


@check('reposimplestore', 'repository using simple storage extension')
def has_reposimplestore():
    return 'simplestore' in getrepofeatures()


@check('repobundlerepo', 'whether we can open bundle files as repos')
def has_repobundlerepo():
    return 'bundlerepo' in getrepofeatures()


@check('repofncache', 'repository has an fncache')
def has_repofncache():
    return 'fncache' in getrepofeatures()


@check('sqlite', 'sqlite3 module is available')
def has_sqlite():
    try:
        import sqlite3

        version = sqlite3.sqlite_version_info
    except ImportError:
        return False

    if version < (3, 8, 3):
        # WITH clause not supported
        return False

    return matchoutput('sqlite3 -version', br'^3\.\d+')


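# Illustrative sketch (not part of hghave.py): the (3, 8, 3) floor matches the
# SQLite release that introduced common table expressions, so a direct probe
# for the WITH clause gives the same answer as the version comparison above.
def _example_sqlite_with_clause():
    import sqlite3

    con = sqlite3.connect(':memory:')
    try:
        row = con.execute('WITH t(n) AS (SELECT 1) SELECT n FROM t').fetchone()
        return row == (1,)
    except sqlite3.OperationalError:
        # pre-3.8.3 SQLite rejects the WITH clause
        return False
    finally:
        con.close()

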
@check('vcr', 'vcr http mocking library')
def has_vcr():
    try:
        import vcr

        vcr.VCR
        return True
    except (ImportError, AttributeError):
        pass
    return False


@check('emacs', 'GNU Emacs')
def has_emacs():
    # Our emacs lisp uses `with-eval-after-load` which is new in emacs
    # 24.4, so we allow emacs 24.4, 24.5, and 25+ (24.5 was the last
    # 24 release)
    return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)')


@check('black', 'the black formatter for python')
def has_black():
    blackcmd = 'black --version'
    version_regex = b'black, version ([0-9a-b.]+)'
    version = matchoutput(blackcmd, version_regex)
    sv = distutils.version.StrictVersion
    return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')


@check('pytype', 'the pytype type checker')
def has_pytype():
    pytypecmd = 'pytype --version'
    version = matchoutput(pytypecmd, b'[0-9a-b.]+')
    sv = distutils.version.StrictVersion
    return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17')


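# Illustrative sketch (not part of hghave.py): StrictVersion understands the
# 'b' pre-release suffix black uses, so '19.10b0' sorts after '19.3b0' but
# before the final '19.10' release.  (distutils is deprecated on modern
# Pythons; packaging.version.Version is the usual replacement.)
def _example_strictversion_ordering():
    from distutils.version import StrictVersion as sv

    assert sv('19.10b0') > sv('19.3b0')
    assert sv('19.10b0') < sv('19.10')
    return True

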
1058 @check("rustfmt", "rustfmt tool")
1058 @check("rustfmt", "rustfmt tool")
1059 def has_rustfmt():
1059 def has_rustfmt():
1060 # We use Nightly's rustfmt due to current unstable config options.
1060 # We use Nightly's rustfmt due to current unstable config options.
1061 return matchoutput(
1061 return matchoutput(
1062 '`rustup which --toolchain nightly rustfmt` --version', b'rustfmt'
1062 '`rustup which --toolchain nightly rustfmt` --version', b'rustfmt'
1063 )
1063 )
1064
1065
1066 @check("lzma", "python lzma module")
1067 def has_lzma():
1068 try:
1069 import _lzma
1070
1071 _lzma.FORMAT_XZ
1072 return True
1073 except ImportError:
1074 return False
@@ -1,630 +1,636 @@
#require serve

$ hg init test
$ cd test
$ echo foo>foo
$ hg commit -Am 1 -d '1 0'
adding foo
$ echo bar>bar
$ hg commit -Am 2 -d '2 0'
adding bar
$ mkdir baz
$ echo bletch>baz/bletch
$ hg commit -Am 3 -d '1000000000 0'
adding baz/bletch
$ hg init subrepo
$ touch subrepo/sub
$ hg -q -R subrepo ci -Am "init subrepo"
$ echo "subrepo = subrepo" > .hgsub
$ hg add .hgsub
$ hg ci -m "add subrepo"

$ cat >> $HGRCPATH <<EOF
> [extensions]
> share =
> EOF

hg subrepos are shared when the parent repo is shared

$ cd ..
$ hg share test shared1
updating working directory
sharing subrepo subrepo from $TESTTMP/test/subrepo
5 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat shared1/subrepo/.hg/sharedpath
$TESTTMP/test/subrepo/.hg (no-eol)

hg subrepos are shared into existence on demand if the parent was shared

$ hg clone -qr 1 test clone1
$ hg share clone1 share2
updating working directory
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R clone1 -q pull
$ hg -R share2 update tip
sharing subrepo subrepo from $TESTTMP/test/subrepo
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat share2/subrepo/.hg/sharedpath
$TESTTMP/test/subrepo/.hg (no-eol)
$ echo 'mod' > share2/subrepo/sub
$ hg -R share2 ci -Sqm 'subrepo mod'
$ hg -R clone1 update -C tip
cloning subrepo subrepo from $TESTTMP/test/subrepo
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ find share2 | egrep 'sharedpath|00.+\.i' | sort
share2/.hg/sharedpath
share2/subrepo/.hg/sharedpath
$ hg -R share2 unshare
unsharing subrepo 'subrepo'
$ find share2 | egrep 'sharedpath|00.+\.i' | sort
share2/.hg/00changelog.i
share2/.hg/sharedpath.old
share2/.hg/store/00changelog.i
share2/.hg/store/00manifest.i
share2/subrepo/.hg/00changelog.i
share2/subrepo/.hg/sharedpath.old
share2/subrepo/.hg/store/00changelog.i
share2/subrepo/.hg/store/00manifest.i
$ hg -R share2/subrepo log -r tip -T compact
1[tip] 559dcc9bfa65 1970-01-01 00:00 +0000 test
subrepo mod

$ rm -rf clone1

$ hg clone -qr 1 test clone1
$ hg share clone1 shared3
updating working directory
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R clone1 -q pull
$ hg -R shared3 archive --config ui.archivemeta=False -r tip -S archive
sharing subrepo subrepo from $TESTTMP/test/subrepo
$ cat shared3/subrepo/.hg/sharedpath
$TESTTMP/test/subrepo/.hg (no-eol)
$ diff -r archive test
Only in test: .hg
Common subdirectories: archive/baz and test/baz (?)
Common subdirectories: archive/subrepo and test/subrepo (?)
Only in test/subrepo: .hg
[1]
$ rm -rf archive

$ cd test
$ echo "[web]" >> .hg/hgrc
$ echo "name = test-archive" >> .hg/hgrc
$ echo "archivesubrepos = True" >> .hg/hgrc
$ cp .hg/hgrc .hg/hgrc-base
> test_archtype() {
> echo "allow-archive = $1" >> .hg/hgrc
> test_archtype_run "$@"
> }
> test_archtype_deprecated() {
> echo "allow$1 = True" >> .hg/hgrc
> test_archtype_run "$@"
> }
> test_archtype_run() {
> hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log \
> --config extensions.blackbox= --config blackbox.track=develwarn
> cat hg.pid >> $DAEMON_PIDS
> echo % $1 allowed should give 200
> get-with-headers.py --bodyfile body localhost:$HGPORT "archive/tip.$2" -
> f --size --sha1 body
> echo % $3 and $4 disallowed should both give 403
> get-with-headers.py --bodyfile body localhost:$HGPORT "archive/tip.$3" -
> f --size --sha1 body
> get-with-headers.py --bodyfile body localhost:$HGPORT "archive/tip.$4" -
> f --size --sha1 body
> killdaemons.py
> cat errors.log
> hg blackbox --config extensions.blackbox= --config blackbox.track=
> cp .hg/hgrc-base .hg/hgrc
> }

check http return codes

$ test_archtype gz tar.gz tar.bz2 zip
% gz allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.tar.gz
content-type: application/x-gzip
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=408, sha1=8fa06531bddecc365a9f5edb0f88b65974bfe505 (no-py38 !)
body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 no-py39 !)
body: size=505, sha1=eb823c293bedff0df4070b854e2c5cbb06d6ec62 (py39 !)
% tar.bz2 and zip disallowed should both give 403
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=4c5cf0f574446c44feb7f88f4e0e2a56bd92c352
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=cbfa5574b337348bfd0564cc534474d002e7d6c7
$ test_archtype bz2 tar.bz2 zip tar.gz
% bz2 allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.tar.bz2
content-type: application/x-bzip2
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=426, sha1=8d87f5aba6e14f1bfea6c232985982c278b2fb0b (no-py38 !)
body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 no-py39 !)
body: size=503, sha1=2d8ce8bb3816603b9683a1804a5a02c11224cb01 (py39 !)
% zip and tar.gz disallowed should both give 403
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=cbfa5574b337348bfd0564cc534474d002e7d6c7
403 Archive type not allowed: gz
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1450, sha1=71f0b12d59f85fdcfe8ff493e2dc66863f2f7734
$ test_archtype zip zip tar.gz tar.bz2
% zip allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.zip
content-type: application/zip
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re)
% tar.gz and tar.bz2 disallowed should both give 403
403 Archive type not allowed: gz
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1450, sha1=71f0b12d59f85fdcfe8ff493e2dc66863f2f7734
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=4c5cf0f574446c44feb7f88f4e0e2a56bd92c352

check http return codes (with deprecated option)

$ test_archtype_deprecated gz tar.gz tar.bz2 zip
% gz allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.tar.gz
content-type: application/x-gzip
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=408, sha1=8fa06531bddecc365a9f5edb0f88b65974bfe505 (no-py38 !)
body: size=506, sha1=70926a04cb8887d0bcccf5380488100a10222def (py38 no-py39 !)
body: size=505, sha1=eb823c293bedff0df4070b854e2c5cbb06d6ec62 (py39 !)
% tar.bz2 and zip disallowed should both give 403
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=4c5cf0f574446c44feb7f88f4e0e2a56bd92c352
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=cbfa5574b337348bfd0564cc534474d002e7d6c7
$ test_archtype_deprecated bz2 tar.bz2 zip tar.gz
% bz2 allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.tar.bz2
content-type: application/x-bzip2
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=426, sha1=8d87f5aba6e14f1bfea6c232985982c278b2fb0b (no-py38 !)
body: size=506, sha1=1bd1f8e8d3701704bd4385038bd9c09b81c77f4e (py38 no-py39 !)
body: size=503, sha1=2d8ce8bb3816603b9683a1804a5a02c11224cb01 (py39 !)
% zip and tar.gz disallowed should both give 403
403 Archive type not allowed: zip
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=cbfa5574b337348bfd0564cc534474d002e7d6c7
403 Archive type not allowed: gz
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1450, sha1=71f0b12d59f85fdcfe8ff493e2dc66863f2f7734
$ test_archtype_deprecated zip zip tar.gz tar.bz2
% zip allowed should give 200
200 Script output follows
content-disposition: attachment; filename=test-archive-1701ef1f1510.zip
content-type: application/zip
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re)
% tar.gz and tar.bz2 disallowed should both give 403
403 Archive type not allowed: gz
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1450, sha1=71f0b12d59f85fdcfe8ff493e2dc66863f2f7734
403 Archive type not allowed: bz2
content-type: text/html; charset=ascii
date: $HTTP_DATE$
etag: W/"*" (glob)
server: testing stub value
transfer-encoding: chunked

body: size=1451, sha1=4c5cf0f574446c44feb7f88f4e0e2a56bd92c352

304 $ echo "allow-archive = gz bz2 zip" >> .hg/hgrc
304 $ echo "allow-archive = gz bz2 zip" >> .hg/hgrc
305 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
305 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
306 $ cat hg.pid >> $DAEMON_PIDS
306 $ cat hg.pid >> $DAEMON_PIDS
307
307
308 check archive links' order
308 check archive links' order
309
309
310 $ get-with-headers.py localhost:$HGPORT "?revcount=1" | grep '/archive/tip.'
310 $ get-with-headers.py localhost:$HGPORT "?revcount=1" | grep '/archive/tip.'
311 <a href="/archive/tip.zip">zip</a>
311 <a href="/archive/tip.zip">zip</a>
312 <a href="/archive/tip.tar.gz">gz</a>
312 <a href="/archive/tip.tar.gz">gz</a>
313 <a href="/archive/tip.tar.bz2">bz2</a>
313 <a href="/archive/tip.tar.bz2">bz2</a>
314
314
315 invalid arch type should give 404
315 invalid arch type should give 404
316
316
317 $ get-with-headers.py localhost:$HGPORT "archive/tip.invalid" | head -n 1
317 $ get-with-headers.py localhost:$HGPORT "archive/tip.invalid" | head -n 1
318 404 Unsupported archive type: None
318 404 Unsupported archive type: None
319
319
320 $ TIP=`hg id -v | cut -f1 -d' '`
320 $ TIP=`hg id -v | cut -f1 -d' '`
321 $ QTIP=`hg id -q`
321 $ QTIP=`hg id -q`
322 $ cat > getarchive.py <<EOF
322 $ cat > getarchive.py <<EOF
323 > from __future__ import absolute_import
323 > from __future__ import absolute_import
324 > import os
324 > import os
325 > import sys
325 > import sys
326 > from mercurial import (
326 > from mercurial import (
327 > util,
327 > util,
328 > )
328 > )
329 > try:
329 > try:
330 > # Set stdout to binary mode for win32 platforms
330 > # Set stdout to binary mode for win32 platforms
331 > import msvcrt
331 > import msvcrt
332 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
332 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
333 > except ImportError:
333 > except ImportError:
334 > pass
334 > pass
335 > if len(sys.argv) <= 3:
335 > if len(sys.argv) <= 3:
336 > node, archive = sys.argv[1:]
336 > node, archive = sys.argv[1:]
337 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
337 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
338 > else:
338 > else:
339 > node, archive, file = sys.argv[1:]
339 > node, archive, file = sys.argv[1:]
340 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
340 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
341 > try:
341 > try:
342 > stdout = sys.stdout.buffer
342 > stdout = sys.stdout.buffer
343 > except AttributeError:
343 > except AttributeError:
344 > stdout = sys.stdout
344 > stdout = sys.stdout
345 > try:
345 > try:
346 > f = util.urlreq.urlopen('http://$LOCALIP:%s/?%s'
346 > f = util.urlreq.urlopen('http://$LOCALIP:%s/?%s'
347 > % (os.environ['HGPORT'], requeststr))
347 > % (os.environ['HGPORT'], requeststr))
348 > stdout.write(f.read())
348 > stdout.write(f.read())
349 > except util.urlerr.httperror as e:
349 > except util.urlerr.httperror as e:
350 > sys.stderr.write(str(e) + '\n')
350 > sys.stderr.write(str(e) + '\n')
351 > EOF
351 > EOF
352 $ "$PYTHON" getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
352 $ "$PYTHON" getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
353 test-archive-1701ef1f1510/.hg_archival.txt
353 test-archive-1701ef1f1510/.hg_archival.txt
354 test-archive-1701ef1f1510/.hgsub
354 test-archive-1701ef1f1510/.hgsub
355 test-archive-1701ef1f1510/.hgsubstate
355 test-archive-1701ef1f1510/.hgsubstate
356 test-archive-1701ef1f1510/bar
356 test-archive-1701ef1f1510/bar
357 test-archive-1701ef1f1510/baz/bletch
357 test-archive-1701ef1f1510/baz/bletch
358 test-archive-1701ef1f1510/foo
358 test-archive-1701ef1f1510/foo
359 test-archive-1701ef1f1510/subrepo/sub
359 test-archive-1701ef1f1510/subrepo/sub
360 $ "$PYTHON" getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
360 $ "$PYTHON" getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
361 test-archive-1701ef1f1510/.hg_archival.txt
361 test-archive-1701ef1f1510/.hg_archival.txt
362 test-archive-1701ef1f1510/.hgsub
362 test-archive-1701ef1f1510/.hgsub
363 test-archive-1701ef1f1510/.hgsubstate
363 test-archive-1701ef1f1510/.hgsubstate
364 test-archive-1701ef1f1510/bar
364 test-archive-1701ef1f1510/bar
365 test-archive-1701ef1f1510/baz/bletch
365 test-archive-1701ef1f1510/baz/bletch
366 test-archive-1701ef1f1510/foo
366 test-archive-1701ef1f1510/foo
367 test-archive-1701ef1f1510/subrepo/sub
367 test-archive-1701ef1f1510/subrepo/sub
368 $ "$PYTHON" getarchive.py "$TIP" zip > archive.zip
368 $ "$PYTHON" getarchive.py "$TIP" zip > archive.zip
369 $ unzip -t archive.zip
369 $ unzip -t archive.zip
370 Archive: archive.zip
370 Archive: archive.zip
371 testing: test-archive-1701ef1f1510/.hg_archival.txt*OK (glob)
371 testing: test-archive-1701ef1f1510/.hg_archival.txt*OK (glob)
372 testing: test-archive-1701ef1f1510/.hgsub*OK (glob)
372 testing: test-archive-1701ef1f1510/.hgsub*OK (glob)
373 testing: test-archive-1701ef1f1510/.hgsubstate*OK (glob)
373 testing: test-archive-1701ef1f1510/.hgsubstate*OK (glob)
374 testing: test-archive-1701ef1f1510/bar*OK (glob)
374 testing: test-archive-1701ef1f1510/bar*OK (glob)
375 testing: test-archive-1701ef1f1510/baz/bletch*OK (glob)
375 testing: test-archive-1701ef1f1510/baz/bletch*OK (glob)
376 testing: test-archive-1701ef1f1510/foo*OK (glob)
376 testing: test-archive-1701ef1f1510/foo*OK (glob)
377 testing: test-archive-1701ef1f1510/subrepo/sub*OK (glob)
377 testing: test-archive-1701ef1f1510/subrepo/sub*OK (glob)
378 No errors detected in compressed data of archive.zip.
378 No errors detected in compressed data of archive.zip.
379
379
380 test that we can download single directories and files
380 test that we can download single directories and files
381
381
382 $ "$PYTHON" getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
382 $ "$PYTHON" getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
383 test-archive-1701ef1f1510/baz/bletch
383 test-archive-1701ef1f1510/baz/bletch
384 $ "$PYTHON" getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
384 $ "$PYTHON" getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
385 test-archive-1701ef1f1510/foo
385 test-archive-1701ef1f1510/foo
386
386
387 test that we detect file patterns that match no files
387 test that we detect file patterns that match no files
388
388
389 $ "$PYTHON" getarchive.py "$TIP" gz foobar
389 $ "$PYTHON" getarchive.py "$TIP" gz foobar
390 HTTP Error 404: file(s) not found: foobar
390 HTTP Error 404: file(s) not found: foobar
391
391
392 test that we reject unsafe patterns
392 test that we reject unsafe patterns
393
393
394 $ "$PYTHON" getarchive.py "$TIP" gz relre:baz
394 $ "$PYTHON" getarchive.py "$TIP" gz relre:baz
395 HTTP Error 404: file(s) not found: relre:baz
395 HTTP Error 404: file(s) not found: relre:baz
396
396
397 $ killdaemons.py
397 $ killdaemons.py
398
398
399 $ hg archive -t tar test.tar
399 $ hg archive -t tar test.tar
400 $ tar tf test.tar
400 $ tar tf test.tar
401 test/.hg_archival.txt
401 test/.hg_archival.txt
402 test/.hgsub
402 test/.hgsub
403 test/.hgsubstate
403 test/.hgsubstate
404 test/bar
404 test/bar
405 test/baz/bletch
405 test/baz/bletch
406 test/foo
406 test/foo
407
407
408 $ hg archive --debug -t tbz2 -X baz test.tar.bz2 --config progress.debug=true
408 $ hg archive --debug -t tbz2 -X baz test.tar.bz2 --config progress.debug=true
409 archiving: 0/4 files (0.00%)
409 archiving: 0/4 files (0.00%)
410 archiving: .hgsub 1/4 files (25.00%)
410 archiving: .hgsub 1/4 files (25.00%)
411 archiving: .hgsubstate 2/4 files (50.00%)
411 archiving: .hgsubstate 2/4 files (50.00%)
412 archiving: bar 3/4 files (75.00%)
412 archiving: bar 3/4 files (75.00%)
413 archiving: foo 4/4 files (100.00%)
413 archiving: foo 4/4 files (100.00%)
414 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
414 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
415 test/.hg_archival.txt
415 test/.hg_archival.txt
416 test/.hgsub
416 test/.hgsub
417 test/.hgsubstate
417 test/.hgsubstate
418 test/bar
418 test/bar
419 test/foo
419 test/foo
420
420
421 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
421 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
422 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
422 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
423 test-1701ef1f1510/.hg_archival.txt
423 test-1701ef1f1510/.hg_archival.txt
424 test-1701ef1f1510/.hgsub
424 test-1701ef1f1510/.hgsub
425 test-1701ef1f1510/.hgsubstate
425 test-1701ef1f1510/.hgsubstate
426 test-1701ef1f1510/bar
426 test-1701ef1f1510/bar
427 test-1701ef1f1510/baz/bletch
427 test-1701ef1f1510/baz/bletch
428 test-1701ef1f1510/foo
428 test-1701ef1f1510/foo
429
429
  $ hg archive autodetected_test.tar
  $ tar tf autodetected_test.tar
  autodetected_test/.hg_archival.txt
  autodetected_test/.hgsub
  autodetected_test/.hgsubstate
  autodetected_test/bar
  autodetected_test/baz/bletch
  autodetected_test/foo

The '-t' should override autodetection

  $ hg archive -t tar autodetect_override_test.zip
  $ tar tf autodetect_override_test.zip
  autodetect_override_test.zip/.hg_archival.txt
  autodetect_override_test.zip/.hgsub
  autodetect_override_test.zip/.hgsubstate
  autodetect_override_test.zip/bar
  autodetect_override_test.zip/baz/bletch
  autodetect_override_test.zip/foo

  $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
  >     hg archive auto_test.$ext
  >     if [ -d auto_test.$ext ]; then
  >         echo "extension $ext was not autodetected."
  >     fi
  > done

  $ cat > md5comp.py <<EOF
  > from __future__ import absolute_import, print_function
  > import hashlib
  > import sys
  > f1, f2 = sys.argv[1:3]
  > h1 = hashlib.md5(open(f1, 'rb').read()).hexdigest()
  > h2 = hashlib.md5(open(f2, 'rb').read()).hexdigest()
  > print(h1 == h2 or "md5 differ: " + repr((h1, h2)))
  > EOF

archive name is stored in the archive, so create similar archives and
rename them afterwards.

  $ hg archive -t tgz tip.tar.gz
  $ mv tip.tar.gz tip1.tar.gz
  $ sleep 1
  $ hg archive -t tgz tip.tar.gz
  $ mv tip.tar.gz tip2.tar.gz
  $ "$PYTHON" md5comp.py tip1.tar.gz tip2.tar.gz
  True

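(illustrative aside -- the md5 match above, despite the 'sleep 1', suggests the
creation time is not embedded in the output; the sketch below is written but
not run here, and shows one way to inspect the gzip header's MTIME field,
bytes 4-7, little-endian, per RFC 1952, by hand)

  $ cat > gzipmtime_sketch.py <<EOF
  > import struct
  > def header_mtime(path):
  >     # gzip header: 2 magic bytes, method, flags, then a 4-byte
  >     # little-endian modification time (RFC 1952)
  >     with open(path, 'rb') as fh:
  >         return struct.unpack('<I', fh.read(8)[4:8])[0]
  > # e.g. header_mtime('tip1.tar.gz') == header_mtime('tip2.tar.gz')
  > # must hold for the archives compared above to be byte-identical
  > EOF
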
  $ hg archive -t zip -p /illegal test.zip
  abort: archive prefix contains illegal components
  [255]
  $ hg archive -t zip -p very/../bad test.zip

  $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
  $ unzip -t test.zip
  Archive: test.zip
  testing: test/bar*OK (glob)
  testing: test/baz/bletch*OK (glob)
  testing: test/foo*OK (glob)
  No errors detected in compressed data of test.zip.

  $ hg archive -t tar - | tar tf - 2>/dev/null
  test-1701ef1f1510/.hg_archival.txt
  test-1701ef1f1510/.hgsub
  test-1701ef1f1510/.hgsubstate
  test-1701ef1f1510/bar
  test-1701ef1f1510/baz/bletch
  test-1701ef1f1510/foo

  $ hg archive -r 0 -t tar rev-%r.tar
  $ [ -f rev-0.tar ]

test .hg_archival.txt

  $ hg archive ../test-tags
  $ cat ../test-tags/.hg_archival.txt
  repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
  node: 1701ef1f151069b8747038e93b5186bb43a47504
  branch: default
  latesttag: null
  latesttagdistance: 4
  changessincelatesttag: 4
  $ hg tag -r 2 mytag
  $ hg tag -r 2 anothertag
  $ hg archive -r 2 ../test-lasttag
  $ cat ../test-lasttag/.hg_archival.txt
  repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
  node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
  branch: default
  tag: anothertag
  tag: mytag

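(illustrative aside -- .hg_archival.txt is a plain 'key: value' listing in
which keys such as 'tag' may repeat; the parser below is a sketch written for
illustration only and is not executed or relied on by this test)

  $ cat > archivalmeta_sketch.py <<EOF
  > def parse_hg_archival(path):
  >     # collect values per key, preserving repeated keys in order
  >     meta = {}
  >     with open(path) as fh:
  >         for line in fh:
  >             line = line.strip()
  >             if line:
  >                 key, _, value = line.partition(': ')
  >                 meta.setdefault(key, []).append(value)
  >     return meta
  > # parse_hg_archival('../test-lasttag/.hg_archival.txt')['tag'] would
  > # yield ['anothertag', 'mytag'] for the metadata shown above
  > EOF
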
  $ hg archive -t bogus test.bogus
  abort: unknown archive type 'bogus'
  [255]

enable progress extension:

  $ cp $HGRCPATH $HGRCPATH.no-progress
  $ cat >> $HGRCPATH <<EOF
  > [progress]
  > assume-tty = 1
  > format = topic bar number
  > delay = 0
  > refresh = 0
  > width = 60
  > EOF

  $ hg archive ../with-progress
  \r (no-eol) (esc)
  archiving [                                           ] 0/6\r (no-eol) (esc)
  archiving [======>                                    ] 1/6\r (no-eol) (esc)
  archiving [=============>                             ] 2/6\r (no-eol) (esc)
  archiving [====================>                      ] 3/6\r (no-eol) (esc)
  archiving [===========================>               ] 4/6\r (no-eol) (esc)
  archiving [==================================>        ] 5/6\r (no-eol) (esc)
  archiving [==========================================>] 6/6\r (no-eol) (esc)
  \r (no-eol) (esc)

cleanup after progress extension test:

  $ cp $HGRCPATH.no-progress $HGRCPATH

server errors

  $ cat errors.log

empty repo

  $ hg init ../empty
  $ cd ../empty
  $ hg archive ../test-empty
  abort: no working directory: please specify a revision
  [255]

old file -- date clamped to 1980

  $ touch -t 197501010000 old
  $ hg add old
  $ hg commit -m old
  $ hg archive ../old.zip
  $ unzip -l ../old.zip | grep -v -- ----- | egrep -v files$
  Archive: ../old.zip
  \s*Length.* (re)
  *172*80*00:00*old/.hg_archival.txt (glob)
  *0*80*00:00*old/old (glob)

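(illustrative aside -- zip entries use the MS-DOS timestamp format, which
cannot represent dates before 1980, so the 1975 mtime above is clamped to
1980-01-01; the sketch below states that assumption explicitly and is written
but not run by this test)

  $ cat > zipclamp_sketch.py <<EOF
  > # earliest timestamp a zip (DOS-format) entry can represent:
  > # 1980-01-01 00:00:00 UTC
  > EPOCH_1980 = 315532800
  > def clamp_zip_mtime(mtime):
  >     return max(int(mtime), EPOCH_1980)
  > # a file touched at 197501010000 therefore lists with the 1980 date
  > # that the "*80*00:00*" globs above are matching
  > EOF
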
test xz support (only available when Python 3's lzma module is present)

#if lzma
  $ hg archive ../archive.txz
  $ which xz >/dev/null && xz -l ../archive.txz | head -n1 || true
  Strms Blocks Compressed Uncompressed Ratio Check Filename (xz !)
  $ rm -f ../archive.txz
#endif
#if py3 no-lzma
  $ hg archive ../archive.txz
  abort: lzma module is not available
  [255]
#endif
#if no-py3
  $ hg archive ../archive.txz
  abort: xz compression is only available in Python 3
  [255]
#endif

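(illustrative aside -- the three branches above split on whether the lzma
module can actually be imported rather than on the interpreter version alone,
since Python 3 can be built without liblzma; the probe below is a sketch of
that distinction, written but not run here)

  $ cat > lzmacheck_sketch.py <<EOF
  > from __future__ import absolute_import, print_function
  > import sys
  > def xz_usable():
  >     if sys.version_info[0] < 3:
  >         return False  # the lzma module ships with Python 3 only
  >     try:
  >         import lzma  # missing when Python was built without liblzma
  >     except ImportError:
  >         return False
  >     return True
  > print(xz_usable())
  > EOF
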
show an error when a provided pattern matches no files

  $ hg archive -I file_that_does_not_exist.foo ../empty.zip
  abort: no files match the archive pattern
  [255]

  $ hg archive -X * ../empty.zip
  abort: no files match the archive pattern
  [255]

  $ cd ..

issue3600: check that "hg archive" creates archive files whose contents
are extracted with the expected timestamp, even when TZ is not
configured as GMT.

  $ mkdir issue3600
  $ cd issue3600

  $ hg init repo
  $ echo a > repo/a
  $ hg -R repo add repo/a
  $ hg -R repo commit -m '#0' -d '456789012 21600'
  $ cat > show_mtime.py <<EOF
  > from __future__ import absolute_import, print_function
  > import os
  > import sys
  > print(int(os.stat(sys.argv[1]).st_mtime))
  > EOF

  $ hg -R repo archive --prefix tar-extracted archive.tar
  $ (TZ=UTC-3; export TZ; tar xf archive.tar)
  $ "$PYTHON" show_mtime.py tar-extracted/a
  456789012

  $ hg -R repo archive --prefix zip-extracted archive.zip
  $ (TZ=UTC-3; export TZ; unzip -q archive.zip)
  $ "$PYTHON" show_mtime.py zip-extracted/a
  456789012

  $ cd ..
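(illustrative aside -- the first field of -d '456789012 21600' is a plain UTC
epoch and the second is only a timezone offset for display, so the archives
carry 456789012 and stat() reports it back regardless of the TZ exported at
extraction time; the sketch below, written but not run here, restates that
arithmetic)

  $ cat > utcmtime_sketch.py <<EOF
  > from __future__ import absolute_import, print_function
  > import os
  > import time
  > COMMIT_EPOCH = 456789012  # first field of the commit's -d argument
  > def mtime_matches_commit(path):
  >     # st_mtime is seconds since the epoch in UTC, so no timezone
  >     # conversion is needed for the comparison
  >     return int(os.stat(path).st_mtime) == COMMIT_EPOCH
  > # the corresponding UTC calendar date, independent of local TZ:
  > print(time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(COMMIT_EPOCH)))
  > EOF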