archive: create alwaysmatcher when no matcher provided...
Martin von Zweigbergk
r40444:997997eb default
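Note on the change below: archive() now normalizes a missing matcher up front by building a match-all matcher once (scmutil.matchall(repo)), so later code can call match(f) unconditionally instead of branching on "if match" at each use site. A minimal standalone sketch of that pattern, using a hypothetical helper rather than Mercurial's own API:

def archive_names(manifest_names, match=None):
    # Hypothetical stand-in for the file-selection loop in archival.archive().
    # Normalizing the optional matcher here mirrors scmutil.matchall(repo):
    # downstream code can call match(name) without checking for None.
    if match is None:
        match = lambda name: True
    return sorted(name for name in manifest_names if match(name))

print(archive_names(['a.txt', 'b/c.txt']))                                # every file
print(archive_names(['a.txt', 'b/c.txt'], lambda n: n.startswith('b/')))  # filtered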
@@ -1,349 +1,349 @@
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import gzip
11 11 import os
12 12 import struct
13 13 import tarfile
14 14 import time
15 15 import zipfile
16 16 import zlib
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 nullrev,
21 21 )
22 22
23 23 from . import (
24 24 error,
25 25 formatter,
26 26 match as matchmod,
27 27 pycompat,
28 28 scmutil,
29 29 util,
30 30 vfs as vfsmod,
31 31 )
32 32 stringio = util.stringio
33 33
34 34 # from unzip source code:
35 35 _UNX_IFREG = 0x8000
36 36 _UNX_IFLNK = 0xa000
37 37
38 38 def tidyprefix(dest, kind, prefix):
39 39 '''choose prefix to use for names in archive. make sure prefix is
40 40 safe for consumers.'''
41 41
42 42 if prefix:
43 43 prefix = util.normpath(prefix)
44 44 else:
45 45 if not isinstance(dest, bytes):
46 46 raise ValueError('dest must be string if no prefix')
47 47 prefix = os.path.basename(dest)
48 48 lower = prefix.lower()
49 49 for sfx in exts.get(kind, []):
50 50 if lower.endswith(sfx):
51 51 prefix = prefix[:-len(sfx)]
52 52 break
53 53 lpfx = os.path.normpath(util.localpath(prefix))
54 54 prefix = util.pconvert(lpfx)
55 55 if not prefix.endswith('/'):
56 56 prefix += '/'
57 57 # Drop the leading '.' path component if present, so Windows can read the
58 58 # zip files (issue4634)
59 59 if prefix.startswith('./'):
60 60 prefix = prefix[2:]
61 61 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
62 62 raise error.Abort(_('archive prefix contains illegal components'))
63 63 return prefix
64 64
65 65 exts = {
66 66 'tar': ['.tar'],
67 67 'tbz2': ['.tbz2', '.tar.bz2'],
68 68 'tgz': ['.tgz', '.tar.gz'],
69 69 'zip': ['.zip'],
70 70 }
71 71
72 72 def guesskind(dest):
73 73 for kind, extensions in exts.iteritems():
74 74 if any(dest.endswith(ext) for ext in extensions):
75 75 return kind
76 76 return None
77 77
78 78 def _rootctx(repo):
79 79 # repo[0] may be hidden
80 80 for rev in repo:
81 81 return repo[rev]
82 82 return repo[nullrev]
83 83
84 84 # {tags} on ctx includes local tags and 'tip', with no current way to limit
85 85 # that to global tags. Therefore, use {latesttag} as a substitute when
86 86 # the distance is 0, since that will be the list of global tags on ctx.
87 87 _defaultmetatemplate = br'''
88 88 repo: {root}
89 89 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
90 90 branch: {branch|utf8}
91 91 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
92 92 separate("\n",
93 93 join(latesttag % "latesttag: {tag}", "\n"),
94 94 "latesttagdistance: {latesttagdistance}",
95 95 "changessincelatesttag: {changessincelatesttag}"))}
96 96 '''[1:] # drop leading '\n'
97 97
98 98 def buildmetadata(ctx):
99 99 '''build content of .hg_archival.txt'''
100 100 repo = ctx.repo()
101 101
102 102 opts = {
103 103 'template': repo.ui.config('experimental', 'archivemetatemplate',
104 104 _defaultmetatemplate)
105 105 }
106 106
107 107 out = util.stringio()
108 108
109 109 fm = formatter.formatter(repo.ui, out, 'archive', opts)
110 110 fm.startitem()
111 111 fm.context(ctx=ctx)
112 112 fm.data(root=_rootctx(repo).hex())
113 113
114 114 if ctx.rev() is None:
115 115 dirty = ''
116 116 if ctx.dirty(missing=True):
117 117 dirty = '+'
118 118 fm.data(dirty=dirty)
119 119 fm.end()
120 120
121 121 return out.getvalue()
122 122
123 123 class tarit(object):
124 124 '''write archive to tar file or stream. can write uncompressed,
125 125 or compress with gzip or bzip2.'''
126 126
127 127 class GzipFileWithTime(gzip.GzipFile):
128 128
129 129 def __init__(self, *args, **kw):
130 130 timestamp = None
131 131 if r'timestamp' in kw:
132 132 timestamp = kw.pop(r'timestamp')
133 133 if timestamp is None:
134 134 self.timestamp = time.time()
135 135 else:
136 136 self.timestamp = timestamp
137 137 gzip.GzipFile.__init__(self, *args, **kw)
138 138
139 139 def _write_gzip_header(self):
140 140 self.fileobj.write('\037\213') # magic header
141 141 self.fileobj.write('\010') # compression method
142 142 fname = self.name
143 143 if fname and fname.endswith('.gz'):
144 144 fname = fname[:-3]
145 145 flags = 0
146 146 if fname:
147 147 flags = gzip.FNAME
148 148 self.fileobj.write(pycompat.bytechr(flags))
149 149 gzip.write32u(self.fileobj, int(self.timestamp))
150 150 self.fileobj.write('\002')
151 151 self.fileobj.write('\377')
152 152 if fname:
153 153 self.fileobj.write(fname + '\000')
154 154
155 155 def __init__(self, dest, mtime, kind=''):
156 156 self.mtime = mtime
157 157 self.fileobj = None
158 158
159 159 def taropen(mode, name='', fileobj=None):
160 160 if kind == 'gz':
161 161 mode = mode[0:1]
162 162 if not fileobj:
163 163 fileobj = open(name, mode + 'b')
164 164 gzfileobj = self.GzipFileWithTime(name,
165 165 pycompat.sysstr(mode + 'b'),
166 166 zlib.Z_BEST_COMPRESSION,
167 167 fileobj, timestamp=mtime)
168 168 self.fileobj = gzfileobj
169 169 return tarfile.TarFile.taropen(
170 170 name, pycompat.sysstr(mode), gzfileobj)
171 171 else:
172 172 return tarfile.open(
173 173 name, pycompat.sysstr(mode + kind), fileobj)
174 174
175 175 if isinstance(dest, bytes):
176 176 self.z = taropen('w:', name=dest)
177 177 else:
178 178 self.z = taropen('w|', fileobj=dest)
179 179
180 180 def addfile(self, name, mode, islink, data):
181 181 name = pycompat.fsdecode(name)
182 182 i = tarfile.TarInfo(name)
183 183 i.mtime = self.mtime
184 184 i.size = len(data)
185 185 if islink:
186 186 i.type = tarfile.SYMTYPE
187 187 i.mode = 0o777
188 188 i.linkname = pycompat.fsdecode(data)
189 189 data = None
190 190 i.size = 0
191 191 else:
192 192 i.mode = mode
193 193 data = stringio(data)
194 194 self.z.addfile(i, data)
195 195
196 196 def done(self):
197 197 self.z.close()
198 198 if self.fileobj:
199 199 self.fileobj.close()
200 200
201 201 class zipit(object):
202 202 '''write archive to zip file or stream. can write uncompressed,
203 203 or compressed with deflate.'''
204 204
205 205 def __init__(self, dest, mtime, compress=True):
206 206 if isinstance(dest, bytes):
207 207 dest = pycompat.fsdecode(dest)
208 208 self.z = zipfile.ZipFile(dest, r'w',
209 209 compress and zipfile.ZIP_DEFLATED or
210 210 zipfile.ZIP_STORED)
211 211
212 212 # Python's zipfile module emits deprecation warnings if we try
213 213 # to store files with a date before 1980.
214 214 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
215 215 if mtime < epoch:
216 216 mtime = epoch
217 217
218 218 self.mtime = mtime
219 219 self.date_time = time.gmtime(mtime)[:6]
220 220
221 221 def addfile(self, name, mode, islink, data):
222 222 i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
223 223 i.compress_type = self.z.compression
224 224 # unzip will not honor unix file modes unless file creator is
225 225 # set to unix (id 3).
226 226 i.create_system = 3
227 227 ftype = _UNX_IFREG
228 228 if islink:
229 229 mode = 0o777
230 230 ftype = _UNX_IFLNK
231 231 i.external_attr = (mode | ftype) << 16
232 232 # add "extended-timestamp" extra block, because zip archives
233 233 # without this will be extracted with unexpected timestamp,
234 234 # if TZ is not configured as GMT
235 235 i.extra += struct.pack('<hhBl',
236 236 0x5455, # block type: "extended-timestamp"
237 237 1 + 4, # size of this block
238 238 1, # "modification time is present"
239 239 int(self.mtime)) # last modification (UTC)
240 240 self.z.writestr(i, data)
241 241
242 242 def done(self):
243 243 self.z.close()
244 244
245 245 class fileit(object):
246 246 '''write archive as files in directory.'''
247 247
248 248 def __init__(self, name, mtime):
249 249 self.basedir = name
250 250 self.opener = vfsmod.vfs(self.basedir)
251 251 self.mtime = mtime
252 252
253 253 def addfile(self, name, mode, islink, data):
254 254 if islink:
255 255 self.opener.symlink(data, name)
256 256 return
257 257 f = self.opener(name, "w", atomictemp=False)
258 258 f.write(data)
259 259 f.close()
260 260 destfile = os.path.join(self.basedir, name)
261 261 os.chmod(destfile, mode)
262 262 if self.mtime is not None:
263 263 os.utime(destfile, (self.mtime, self.mtime))
264 264
265 265 def done(self):
266 266 pass
267 267
268 268 archivers = {
269 269 'files': fileit,
270 270 'tar': tarit,
271 271 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
272 272 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
273 273 'uzip': lambda name, mtime: zipit(name, mtime, False),
274 274 'zip': zipit,
275 275 }
276 276
277 277 def archive(repo, dest, node, kind, decode=True, match=None,
278 278 prefix='', mtime=None, subrepos=False):
279 279 '''create archive of repo as it was at node.
280 280
281 281 dest can be name of directory, name of archive file, or file
282 282 object to write archive to.
283 283
284 284 kind is type of archive to create.
285 285
286 286 decode tells whether to put files through decode filters from
287 287 hgrc.
288 288
289 289 match is a matcher to filter names of files to write to archive.
290 290
291 291 prefix is name of path to put before every archive member.
292 292
293 293 mtime is the modified time, in seconds, or None to use the changeset time.
294 294
295 295 subrepos tells whether to include subrepos.
296 296 '''
297 297
298 298 if kind == 'files':
299 299 if prefix:
300 300 raise error.Abort(_('cannot give prefix when archiving to files'))
301 301 else:
302 302 prefix = tidyprefix(dest, kind, prefix)
303 303
304 304 def write(name, mode, islink, getdata):
305 305 data = getdata()
306 306 if decode:
307 307 data = repo.wwritedata(name, data)
308 308 archiver.addfile(prefix + name, mode, islink, data)
309 309
310 310 if kind not in archivers:
311 311 raise error.Abort(_("unknown archive type '%s'") % kind)
312 312
313 313 ctx = repo[node]
314 314 archiver = archivers[kind](dest, mtime or ctx.date()[0])
315 315
316 if not match:
317 match = scmutil.matchall(repo)
318
316 319 if repo.ui.configbool("ui", "archivemeta"):
317 320 name = '.hg_archival.txt'
318 if not match or match(name):
321 if match(name):
319 322 write(name, 0o644, False, lambda: buildmetadata(ctx))
320 323
321 if match:
322 files = [f for f in ctx.manifest().keys() if match(f)]
323 else:
324 files = ctx.manifest().keys()
324 files = [f for f in ctx.manifest().keys() if match(f)]
325 325 total = len(files)
326 326 if total:
327 327 files.sort()
328 328 scmutil.prefetchfiles(repo, [ctx.rev()],
329 329 scmutil.matchfiles(repo, files))
330 330 progress = scmutil.progress(repo.ui, _('archiving'), unit=_('files'),
331 331 total=total)
332 332 progress.update(0)
333 333 for f in files:
334 334 ff = ctx.flags(f)
335 335 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
336 336 progress.increment(item=f)
337 337 progress.complete()
338 338
339 339 if subrepos:
340 340 for subpath in sorted(ctx.substate):
341 341 sub = ctx.workingsub(subpath)
342 342 submatch = matchmod.subdirmatcher(subpath, match)
343 343 total += sub.archive(archiver, prefix, submatch, decode)
344 344
345 345 if total == 0:
346 346 raise error.Abort(_('no files match the archive pattern'))
347 347
348 348 archiver.done()
349 349 return total
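For reference, a minimal usage sketch of the archive() entry point documented above, assuming Mercurial's Python API of this vintage; the repository path, destination name, and prefix are illustrative assumptions, not part of this change. Leaving match=None now archives every tracked file via the alwaysmatcher created above.

from mercurial import archival, hg, ui as uimod

# Assumed local repository path; replace with a real working copy.
repo = hg.repository(uimod.ui.load(), b'/path/to/repo')

# Archive the tip changeset as a gzipped tarball under a 'project/' prefix.
count = archival.archive(repo, b'project.tar.gz', b'tip', b'tgz',
                         decode=True, match=None, prefix=b'project/')
print('archived %d files' % count)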