archival: fix a missing r'' on a kwargs check...
Augie Fackler
r36744:009da8c2 default
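
The one-character change below matters because of how Mercurial is ported to Python 3: a source transformer rewrites unprefixed string literals into bytes, while the keys of a **kwargs dict are always native str on Python 3, so a bytes key can never match and the timestamp check silently fails. An r'' prefix keeps the literal a native string on both Python 2 and 3. A minimal standalone sketch of the difference (the helper name popmtime is illustrative only, not part of the Mercurial codebase):

    def popmtime(**kw):
        # keyword argument names are always native str on Python 3
        if b'timestamp' in kw:           # what an unprefixed literal becomes under
            return kw.pop(b'timestamp')  # the transformer; never taken on 3.x
        if r'timestamp' in kw:           # native-str key: matches on 2.x and 3.x
            return kw.pop(r'timestamp')
        return None

    print(popmtime(timestamp=1520000000))  # -> 1520000000, via the r'' branch on 3.x
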
@@ -1,364 +1,364 @@
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import gzip
import os
import struct
import tarfile
import time
import zipfile
import zlib

from .i18n import _

from . import (
    error,
    formatter,
    match as matchmod,
    pycompat,
    scmutil,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# from unzip source code:
_UNX_IFREG = 0x8000
_UNX_IFLNK = 0xa000

def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, bytes):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in exts.get(kind, []):
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise error.Abort(_('archive prefix contains illegal components'))
    return prefix

exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
    }

def guesskind(dest):
    for kind, extensions in exts.iteritems():
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None

def _rootctx(repo):
    # repo[0] may be hidden
    for rev in repo:
        return repo[rev]
    return repo['null']

# {tags} on ctx includes local tags and 'tip', with no current way to limit
# that to global tags. Therefore, use {latesttag} as a substitute when
# the distance is 0, since that will be the list of global tags on ctx.
_defaultmetatemplate = br'''
repo: {root}
node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
branch: {branch|utf8}
{ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
      separate("\n",
               join(latesttag % "latesttag: {tag}", "\n"),
               "latesttagdistance: {latesttagdistance}",
               "changessincelatesttag: {changessincelatesttag}"))}
'''[1:]  # drop leading '\n'

def buildmetadata(ctx):
    '''build content of .hg_archival.txt'''
    repo = ctx.repo()

    opts = {
        'template': repo.ui.config('experimental', 'archivemetatemplate',
                                   _defaultmetatemplate)
    }

    out = util.stringio()

    fm = formatter.formatter(repo.ui, out, 'archive', opts)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.data(root=_rootctx(repo).hex())

    if ctx.rev() is None:
        dirty = ''
        if ctx.dirty(missing=True):
            dirty = '+'
        fm.data(dirty=dirty)
    fm.end()

    return out.getvalue()

class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):

        def __init__(self, *args, **kw):
            timestamp = None
-            if 'timestamp' in kw:
+            if r'timestamp' in kw:
                timestamp = kw.pop(r'timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.name
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name='', fileobj=None):
            if kind == 'gz':
                mode = mode[0:1]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(
                    name, pycompat.sysstr(mode), gzfileobj)
            else:
                return tarfile.open(
                    name, pycompat.sysstr(mode + kind), fileobj)

        if isinstance(dest, bytes):
            self.z = taropen('w:', name=dest)
        else:
            self.z = taropen('w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        name = pycompat.fsdecode(name)
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = pycompat.fsdecode(data)
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            self.fileobj.close()

class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset

class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, bytes):
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(pycompat.fsdecode(dest), r'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        i.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack('<hhBl',
                               0x5455,     # block type: "extended-timestamp"
                               1 + 4,      # size of this block
                               1,          # "modification time is present"
                               int(self.mtime)) # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        self.z.close()

class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        self.basedir = name
        self.opener = vfsmod.vfs(self.basedir)
        self.mtime = mtime

    def addfile(self, name, mode, islink, data):
        if islink:
            self.opener.symlink(data, name)
            return
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)
        if self.mtime is not None:
            os.utime(destfile, (self.mtime, self.mtime))

    def done(self):
        pass

archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
    }

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime is the modified time, in seconds, or None to use the changeset time.

    subrepos tells whether to include subrepos.
    '''

    if kind == 'files':
        if prefix:
            raise error.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta"):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        scmutil.fileprefetchhooks(repo, ctx, files)
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch, decode)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
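
For context, archive() at the bottom of the file is the entry point that 'hg archive' ultimately drives. A rough usage sketch against these internal, unstable APIs, assuming it runs from inside a local clone (the output name and prefix are made up for illustration):

    from mercurial import archival, hg, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')
    # write a gzipped tarball of the tip changeset, with every member
    # placed under the illustrative prefix 'myproject-1.0/'
    archival.archive(repo, b'../myproject-1.0.tar.gz', b'tip', b'tgz',
                     prefix=b'myproject-1.0/')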