archive: create alwaysmatcher when no matcher provided...
Martin von Zweigbergk
r40444:997997eb default
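
The change below replaces the scattered "if match:" checks inside archive() with a single normalization step at the top of the function: when no matcher is passed in, an always-matcher is created with scmutil.matchall(repo), so the rest of the function can assume a matcher is always present. A minimal, standalone sketch of that pattern (plain Python with hypothetical names, not Mercurial code):

    # Normalize an optional predicate to an always-true default once,
    # so every later use has a single code path.
    def list_files(names, match=None):
        if match is None:
            match = lambda name: True   # stands in for scmutil.matchall(repo)
        return [n for n in names if match(n)]

    print(list_files(['a.txt', 'b.py']))                                # all names
    print(list_files(['a.txt', 'b.py'], lambda n: n.endswith('.py')))   # ['b.py']
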
@@ -1,349 +1,349 @@
 # archival.py - revision archival for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import gzip
 import os
 import struct
 import tarfile
 import time
 import zipfile
 import zlib
 
 from .i18n import _
 from .node import (
     nullrev,
 )
 
 from . import (
     error,
     formatter,
     match as matchmod,
     pycompat,
     scmutil,
     util,
     vfs as vfsmod,
 )
 stringio = util.stringio
 
 # from unzip source code:
 _UNX_IFREG = 0x8000
 _UNX_IFLNK = 0xa000
 
 def tidyprefix(dest, kind, prefix):
     '''choose prefix to use for names in archive. make sure prefix is
     safe for consumers.'''
 
     if prefix:
         prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, bytes):
             raise ValueError('dest must be string if no prefix')
         prefix = os.path.basename(dest)
         lower = prefix.lower()
         for sfx in exts.get(kind, []):
             if lower.endswith(sfx):
                 prefix = prefix[:-len(sfx)]
                 break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
     if not prefix.endswith('/'):
         prefix += '/'
     # Drop the leading '.' path component if present, so Windows can read the
     # zip files (issue4634)
     if prefix.startswith('./'):
         prefix = prefix[2:]
     if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
         raise error.Abort(_('archive prefix contains illegal components'))
     return prefix
 
 exts = {
     'tar': ['.tar'],
     'tbz2': ['.tbz2', '.tar.bz2'],
     'tgz': ['.tgz', '.tar.gz'],
     'zip': ['.zip'],
     }
 
 def guesskind(dest):
     for kind, extensions in exts.iteritems():
         if any(dest.endswith(ext) for ext in extensions):
             return kind
     return None
 
 def _rootctx(repo):
     # repo[0] may be hidden
     for rev in repo:
         return repo[rev]
     return repo[nullrev]
 
 # {tags} on ctx includes local tags and 'tip', with no current way to limit
 # that to global tags. Therefore, use {latesttag} as a substitute when
 # the distance is 0, since that will be the list of global tags on ctx.
 _defaultmetatemplate = br'''
 repo: {root}
 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
 branch: {branch|utf8}
 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
   separate("\n",
            join(latesttag % "latesttag: {tag}", "\n"),
            "latesttagdistance: {latesttagdistance}",
            "changessincelatesttag: {changessincelatesttag}"))}
 '''[1:]  # drop leading '\n'
 
 def buildmetadata(ctx):
     '''build content of .hg_archival.txt'''
     repo = ctx.repo()
 
     opts = {
         'template': repo.ui.config('experimental', 'archivemetatemplate',
                                    _defaultmetatemplate)
     }
 
     out = util.stringio()
 
     fm = formatter.formatter(repo.ui, out, 'archive', opts)
     fm.startitem()
     fm.context(ctx=ctx)
     fm.data(root=_rootctx(repo).hex())
 
     if ctx.rev() is None:
         dirty = ''
         if ctx.dirty(missing=True):
             dirty = '+'
         fm.data(dirty=dirty)
     fm.end()
 
     return out.getvalue()
 
 class tarit(object):
     '''write archive to tar file or stream.  can write uncompressed,
     or compress with gzip or bzip2.'''
 
     class GzipFileWithTime(gzip.GzipFile):
 
         def __init__(self, *args, **kw):
             timestamp = None
             if r'timestamp' in kw:
                 timestamp = kw.pop(r'timestamp')
             if timestamp is None:
                 self.timestamp = time.time()
             else:
                 self.timestamp = timestamp
             gzip.GzipFile.__init__(self, *args, **kw)
 
         def _write_gzip_header(self):
             self.fileobj.write('\037\213')             # magic header
             self.fileobj.write('\010')                 # compression method
             fname = self.name
             if fname and fname.endswith('.gz'):
                 fname = fname[:-3]
             flags = 0
             if fname:
                 flags = gzip.FNAME
             self.fileobj.write(pycompat.bytechr(flags))
             gzip.write32u(self.fileobj, int(self.timestamp))
             self.fileobj.write('\002')
             self.fileobj.write('\377')
             if fname:
                 self.fileobj.write(fname + '\000')
 
     def __init__(self, dest, mtime, kind=''):
         self.mtime = mtime
         self.fileobj = None
 
         def taropen(mode, name='', fileobj=None):
             if kind == 'gz':
                 mode = mode[0:1]
                 if not fileobj:
                     fileobj = open(name, mode + 'b')
                 gzfileobj = self.GzipFileWithTime(name,
                                                   pycompat.sysstr(mode + 'b'),
                                                   zlib.Z_BEST_COMPRESSION,
                                                   fileobj, timestamp=mtime)
                 self.fileobj = gzfileobj
                 return tarfile.TarFile.taropen(
                     name, pycompat.sysstr(mode), gzfileobj)
             else:
                 return tarfile.open(
                     name, pycompat.sysstr(mode + kind), fileobj)
 
         if isinstance(dest, bytes):
             self.z = taropen('w:', name=dest)
         else:
             self.z = taropen('w|', fileobj=dest)
 
     def addfile(self, name, mode, islink, data):
         name = pycompat.fsdecode(name)
         i = tarfile.TarInfo(name)
         i.mtime = self.mtime
         i.size = len(data)
         if islink:
             i.type = tarfile.SYMTYPE
             i.mode = 0o777
             i.linkname = pycompat.fsdecode(data)
             data = None
             i.size = 0
         else:
             i.mode = mode
             data = stringio(data)
         self.z.addfile(i, data)
 
     def done(self):
         self.z.close()
         if self.fileobj:
             self.fileobj.close()
 
 class zipit(object):
     '''write archive to zip file or stream.  can write uncompressed,
     or compressed with deflate.'''
 
     def __init__(self, dest, mtime, compress=True):
         if isinstance(dest, bytes):
             dest = pycompat.fsdecode(dest)
         self.z = zipfile.ZipFile(dest, r'w',
                                  compress and zipfile.ZIP_DEFLATED or
                                  zipfile.ZIP_STORED)
 
         # Python's zipfile module emits deprecation warnings if we try
         # to store files with a date before 1980.
         epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
         if mtime < epoch:
             mtime = epoch
 
         self.mtime = mtime
         self.date_time = time.gmtime(mtime)[:6]
 
     def addfile(self, name, mode, islink, data):
         i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
         i.compress_type = self.z.compression
         # unzip will not honor unix file modes unless file creator is
         # set to unix (id 3).
         i.create_system = 3
         ftype = _UNX_IFREG
         if islink:
             mode = 0o777
             ftype = _UNX_IFLNK
         i.external_attr = (mode | ftype) << 16
         # add "extended-timestamp" extra block, because zip archives
         # without this will be extracted with unexpected timestamp,
         # if TZ is not configured as GMT
         i.extra += struct.pack('<hhBl',
                                0x5455,     # block type: "extended-timestamp"
                                1 + 4,      # size of this block
                                1,          # "modification time is present"
                                int(self.mtime)) # last modification (UTC)
         self.z.writestr(i, data)
 
     def done(self):
         self.z.close()
 
 class fileit(object):
     '''write archive as files in directory.'''
 
     def __init__(self, name, mtime):
         self.basedir = name
         self.opener = vfsmod.vfs(self.basedir)
         self.mtime = mtime
 
     def addfile(self, name, mode, islink, data):
         if islink:
             self.opener.symlink(data, name)
             return
         f = self.opener(name, "w", atomictemp=False)
         f.write(data)
         f.close()
         destfile = os.path.join(self.basedir, name)
         os.chmod(destfile, mode)
         if self.mtime is not None:
             os.utime(destfile, (self.mtime, self.mtime))
 
     def done(self):
         pass
 
 archivers = {
     'files': fileit,
     'tar': tarit,
     'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
     'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
     'uzip': lambda name, mtime: zipit(name, mtime, False),
     'zip': zipit,
     }
 
 def archive(repo, dest, node, kind, decode=True, match=None,
             prefix='', mtime=None, subrepos=False):
     '''create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
 
     kind is type of archive to create.
 
     decode tells whether to put files through decode filters from
     hgrc.
 
     match is a matcher to filter names of files to write to archive.
 
     prefix is name of path to put before every archive member.
 
     mtime is the modified time, in seconds, or None to use the changeset time.
 
     subrepos tells whether to include subrepos.
     '''
 
     if kind == 'files':
         if prefix:
             raise error.Abort(_('cannot give prefix when archiving to files'))
     else:
         prefix = tidyprefix(dest, kind, prefix)
 
     def write(name, mode, islink, getdata):
         data = getdata()
         if decode:
             data = repo.wwritedata(name, data)
         archiver.addfile(prefix + name, mode, islink, data)
 
     if kind not in archivers:
         raise error.Abort(_("unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
 
+    if not match:
+        match = scmutil.matchall(repo)
+
     if repo.ui.configbool("ui", "archivemeta"):
         name = '.hg_archival.txt'
-        if not match or match(name):
+        if match(name):
             write(name, 0o644, False, lambda: buildmetadata(ctx))
 
-    if match:
-        files = [f for f in ctx.manifest().keys() if match(f)]
-    else:
-        files = ctx.manifest().keys()
+    files = [f for f in ctx.manifest().keys() if match(f)]
     total = len(files)
     if total:
         files.sort()
         scmutil.prefetchfiles(repo, [ctx.rev()],
                               scmutil.matchfiles(repo, files))
         progress = scmutil.progress(repo.ui, _('archiving'), unit=_('files'),
                                     total=total)
         progress.update(0)
         for f in files:
             ff = ctx.flags(f)
             write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
             progress.increment(item=f)
         progress.complete()
 
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
             submatch = matchmod.subdirmatcher(subpath, match)
             total += sub.archive(archiver, prefix, submatch, decode)
 
     if total == 0:
         raise error.Abort(_('no files match the archive pattern'))
 
     archiver.done()
     return total
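
After this change a caller may omit the matcher entirely; archive() supplies the always-matcher itself. A hedged usage sketch against Mercurial's internal (not stable) API, with the repository path and output name as placeholder assumptions:

    from mercurial import archival, hg, ui as uimod

    repo = hg.repository(uimod.ui.load(), b'/path/to/repo')  # hypothetical local repo
    # 'tgz' selects the gzip-compressed tar archiver from the archivers table;
    # leaving match=None now means "archive everything" via scmutil.matchall(repo).
    archival.archive(repo, b'snapshot.tar.gz', b'tip', b'tgz')
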