archive: rewrite default metadata template as a multi-line bytes literal...
Yuya Nishihara
r35923:887bbce7 default
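The patch replaces the old chain of adjacent r'...' fragments inside buildmetadata() with a single module-level bytes literal, sliced with [1:] so that the newline right after the opening quotes does not become an empty first line of the template. Below is a minimal, self-contained sketch of that idiom; it is plain Python, not part of the changeset, and the two template keywords are copied from the patch only as placeholders (the real default uses a raw br''' literal because it embeds backslash escapes such as \n for the templater).

# sketch of the '''...'''[1:] idiom introduced by this changeset
template = b'''
repo: {root}
branch: {branch|utf8}
'''[1:]  # slicing drops the leading '\n' that follows the opening quotes

assert template.startswith(b'repo:')  # no empty first line in the result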
@@ -1,360 +1,358 @@
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import gzip
import os
import struct
import tarfile
import time
import zipfile
import zlib

from .i18n import _

from . import (
    error,
    formatter,
    match as matchmod,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# from unzip source code:
_UNX_IFREG = 0x8000
_UNX_IFLNK = 0xa000

def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
    lower = prefix.lower()
    for sfx in exts.get(kind, []):
        if lower.endswith(sfx):
            prefix = prefix[:-len(sfx)]
            break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise error.Abort(_('archive prefix contains illegal components'))
    return prefix

exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
    }

def guesskind(dest):
    for kind, extensions in exts.iteritems():
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None

def _rootctx(repo):
    # repo[0] may be hidden
    for rev in repo:
        return repo[rev]
    return repo['null']

+# {tags} on ctx includes local tags and 'tip', with no current way to limit
+# that to global tags. Therefore, use {latesttag} as a substitute when
+# the distance is 0, since that will be the list of global tags on ctx.
+_defaultmetatemplate = br'''
+repo: {root}
+node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
+branch: {branch|utf8}
+{ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
+      separate("\n",
+               join(latesttag % "latesttag: {tag}", "\n"),
+               "latesttagdistance: {latesttagdistance}",
+               "changessincelatesttag: {changessincelatesttag}"))}
+'''[1:]  # drop leading '\n'
+
def buildmetadata(ctx):
    '''build content of .hg_archival.txt'''
    repo = ctx.repo()

-    default = (
-        r'repo: {root}\n'
-        r'node: {ifcontains(rev, revset("wdir()"),'
-        r'"{p1node}{dirty}", "{node}")}\n'
-        r'branch: {branch|utf8}\n'
-
-        # {tags} on ctx includes local tags and 'tip', with no current way to
-        # limit that to global tags. Therefore, use {latesttag} as a substitute
-        # when the distance is 0, since that will be the list of global tags on
-        # ctx.
-        r'{ifeq(latesttagdistance, 0, latesttag % "tag: {tag}\n",'
-        r'"{latesttag % "latesttag: {tag}\n"}'
-        r'latesttagdistance: {latesttagdistance}\n'
-        r'changessincelatesttag: {changessincelatesttag}\n")}'
-    )
-
    opts = {
        'template': repo.ui.config('experimental', 'archivemetatemplate',
-                                   default)
+                                   _defaultmetatemplate)
    }

    out = util.stringio()

    fm = formatter.formatter(repo.ui, out, 'archive', opts)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.data(root=_rootctx(repo).hex())

    if ctx.rev() is None:
        dirty = ''
        if ctx.dirty(missing=True):
            dirty = '+'
        fm.data(dirty=dirty)
    fm.end()

    return out.getvalue()

class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop(r'timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.name
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name='', fileobj=None):
            if kind == 'gz':
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen('w:', name=dest)
        else:
            self.z = taropen('w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = data
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            self.fileobj.close()

class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset

class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(name, self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        i.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack('<hhBl',
                               0x5455,     # block type: "extended-timestamp"
                               1 + 4,      # size of this block
                               1,          # "modification time is present"
                               int(self.mtime)) # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        self.z.close()

class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        self.basedir = name
        self.opener = vfsmod.vfs(self.basedir)
        self.mtime = mtime

    def addfile(self, name, mode, islink, data):
        if islink:
            self.opener.symlink(data, name)
            return
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)
        if self.mtime is not None:
            os.utime(destfile, (self.mtime, self.mtime))

    def done(self):
        pass

archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
    }

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime is the modified time, in seconds, or None to use the changeset time.

    subrepos tells whether to include subrepos.
    '''

    if kind == 'files':
        if prefix:
            raise error.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta"):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch, decode)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
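For orientation, a hedged usage sketch of the module above, written against Mercurial's internal Python API as it looked around this revision (Python 2 era). The repository path, output name, and template override are hypothetical; only archival.archive(), the 'ui.archivemeta' switch, and the 'experimental.archivemetatemplate' option appear in the code itself, and these internal APIs are not a stable interface.

from mercurial import archival, hg, ui as uimod

ui = uimod.ui.load()
# Optional: override the default the patch defines; when this option is
# unset, repo.ui.config() falls back to _defaultmetatemplate.
ui.setconfig('experimental', 'archivemetatemplate', 'repo: {root}\n')
repo = hg.repository(ui, '/path/to/repo')   # hypothetical local repository

# Write a gzipped tarball of the working-directory parent; '.hg_archival.txt'
# is included because ui.archivemeta defaults to on.
archival.archive(repo, 'snapshot.tar.gz', '.', 'tgz', prefix='snapshot/')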