archival: force a `CompressionError` to bytes before passing to `error.Abort`...

Matt Harbison
r48825:406a7e62 (default branch)
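For context on the single functional change in the diff below: `error.Abort` expects a bytes message, while the text attached to a `tarfile.CompressionError` is a native string on Python 3. The following sketch shows the call pattern the changeset adopts inside `taropen()`, assuming `stringutil.forcebytestr()` as Mercurial's general helper for coercing arbitrary objects (typically exceptions) into byte strings; the standalone wrapper name `_open_tar_stream` is purely illustrative and not part of archival.py.

import tarfile

from mercurial import error, pycompat
from mercurial.utils import stringutil


def _open_tar_stream(name, mode, kind, fileobj=None):
    # Illustrative wrapper: open a (possibly compressed) tar file the way
    # archival.taropen() does for the non-gzip kinds, and surface any
    # compression problem as a user-facing Abort.
    try:
        return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj)
    except tarfile.CompressionError as e:
        # Force the (possibly unicode) exception message to bytes before
        # handing it to error.Abort, which expects bytes.
        raise error.Abort(stringutil.forcebytestr(e))

For example, `_open_tar_stream(b'out.tar.bz2', b'w:', b'bz2')` would turn a CompressionError (say, when the bz2 module is unavailable) into an ordinary Mercurial abort rather than an unhandled traceback.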
@@ -1,397 +1,399 @@
 # archival.py - revision archival for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import gzip
 import os
 import struct
 import tarfile
 import time
 import zipfile
 import zlib
 
 from .i18n import _
 from .node import nullrev
 from .pycompat import open
 
 from . import (
     error,
     formatter,
     match as matchmod,
     pycompat,
     scmutil,
     util,
     vfs as vfsmod,
 )
 
+from .utils import stringutil
+
 stringio = util.stringio
 
 # from unzip source code:
 _UNX_IFREG = 0x8000
 _UNX_IFLNK = 0xA000
 
 
 def tidyprefix(dest, kind, prefix):
     """choose prefix to use for names in archive. make sure prefix is
     safe for consumers."""
 
     if prefix:
         prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, bytes):
             raise ValueError(b'dest must be string if no prefix')
         prefix = os.path.basename(dest)
     lower = prefix.lower()
     for sfx in exts.get(kind, []):
         if lower.endswith(sfx):
             prefix = prefix[: -len(sfx)]
             break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
     if not prefix.endswith(b'/'):
         prefix += b'/'
     # Drop the leading '.' path component if present, so Windows can read the
     # zip files (issue4634)
     if prefix.startswith(b'./'):
         prefix = prefix[2:]
     if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
         raise error.Abort(_(b'archive prefix contains illegal components'))
     return prefix
 
 
 exts = {
     b'tar': [b'.tar'],
     b'tbz2': [b'.tbz2', b'.tar.bz2'],
     b'tgz': [b'.tgz', b'.tar.gz'],
     b'zip': [b'.zip'],
     b'txz': [b'.txz', b'.tar.xz'],
 }
 
 
 def guesskind(dest):
     for kind, extensions in pycompat.iteritems(exts):
         if any(dest.endswith(ext) for ext in extensions):
             return kind
     return None
 
 
 def _rootctx(repo):
     # repo[0] may be hidden
     for rev in repo:
         return repo[rev]
     return repo[nullrev]
 
 
 # {tags} on ctx includes local tags and 'tip', with no current way to limit
 # that to global tags. Therefore, use {latesttag} as a substitute when
 # the distance is 0, since that will be the list of global tags on ctx.
 _defaultmetatemplate = br'''
 repo: {root}
 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
 branch: {branch|utf8}
 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
       separate("\n",
                join(latesttag % "latesttag: {tag}", "\n"),
                "latesttagdistance: {latesttagdistance}",
                "changessincelatesttag: {changessincelatesttag}"))}
 '''[
     1:
 ]  # drop leading '\n'
 
 
 def buildmetadata(ctx):
     '''build content of .hg_archival.txt'''
     repo = ctx.repo()
 
     opts = {
         b'template': repo.ui.config(
             b'experimental', b'archivemetatemplate', _defaultmetatemplate
         )
     }
 
     out = util.stringio()
 
     fm = formatter.formatter(repo.ui, out, b'archive', opts)
     fm.startitem()
     fm.context(ctx=ctx)
     fm.data(root=_rootctx(repo).hex())
 
     if ctx.rev() is None:
         dirty = b''
         if ctx.dirty(missing=True):
             dirty = b'+'
         fm.data(dirty=dirty)
     fm.end()
 
     return out.getvalue()
 
 
 class tarit(object):
     """write archive to tar file or stream. can write uncompressed,
     or compress with gzip or bzip2."""
 
     if pycompat.ispy3:
         GzipFileWithTime = gzip.GzipFile  # camelcase-required
     else:
 
         class GzipFileWithTime(gzip.GzipFile):
             def __init__(self, *args, **kw):
                 timestamp = None
                 if 'mtime' in kw:
                     timestamp = kw.pop('mtime')
                 if timestamp is None:
                     self.timestamp = time.time()
                 else:
                     self.timestamp = timestamp
                 gzip.GzipFile.__init__(self, *args, **kw)
 
             def _write_gzip_header(self):
                 self.fileobj.write(b'\037\213')  # magic header
                 self.fileobj.write(b'\010')  # compression method
                 fname = self.name
                 if fname and fname.endswith(b'.gz'):
                     fname = fname[:-3]
                 flags = 0
                 if fname:
                     flags = gzip.FNAME  # pytype: disable=module-attr
                 self.fileobj.write(pycompat.bytechr(flags))
                 gzip.write32u(  # pytype: disable=module-attr
                     self.fileobj, int(self.timestamp)
                 )
                 self.fileobj.write(b'\002')
                 self.fileobj.write(b'\377')
                 if fname:
                     self.fileobj.write(fname + b'\000')
 
     def __init__(self, dest, mtime, kind=b''):
         self.mtime = mtime
         self.fileobj = None
 
         def taropen(mode, name=b'', fileobj=None):
             if kind == b'gz':
                 mode = mode[0:1]
                 if not fileobj:
                     fileobj = open(name, mode + b'b')
                 gzfileobj = self.GzipFileWithTime(
                     name,
                     pycompat.sysstr(mode + b'b'),
                     zlib.Z_BEST_COMPRESSION,
                     fileobj,
                     mtime=mtime,
                 )
                 self.fileobj = gzfileobj
                 return (
                     tarfile.TarFile.taropen(  # pytype: disable=attribute-error
                         name, pycompat.sysstr(mode), gzfileobj
                     )
                 )
             else:
                 try:
                     return tarfile.open(
                         name, pycompat.sysstr(mode + kind), fileobj
                     )
                 except tarfile.CompressionError as e:
-                    raise error.Abort(pycompat.bytestr(e))
+                    raise error.Abort(stringutil.forcebytestr(e))
 
         if isinstance(dest, bytes):
             self.z = taropen(b'w:', name=dest)
         else:
             self.z = taropen(b'w|', fileobj=dest)
 
     def addfile(self, name, mode, islink, data):
         name = pycompat.fsdecode(name)
         i = tarfile.TarInfo(name)
         i.mtime = self.mtime
         i.size = len(data)
         if islink:
             i.type = tarfile.SYMTYPE
             i.mode = 0o777
             i.linkname = pycompat.fsdecode(data)
             data = None
             i.size = 0
         else:
             i.mode = mode
             data = stringio(data)
         self.z.addfile(i, data)
 
     def done(self):
         self.z.close()
         if self.fileobj:
             self.fileobj.close()
 
 
 class zipit(object):
     """write archive to zip file or stream. can write uncompressed,
     or compressed with deflate."""
 
     def __init__(self, dest, mtime, compress=True):
         if isinstance(dest, bytes):
             dest = pycompat.fsdecode(dest)
         self.z = zipfile.ZipFile(
             dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
         )
 
         # Python's zipfile module emits deprecation warnings if we try
         # to store files with a date before 1980.
         epoch = 315532800  # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
         if mtime < epoch:
             mtime = epoch
 
         self.mtime = mtime
         self.date_time = time.gmtime(mtime)[:6]
 
     def addfile(self, name, mode, islink, data):
         i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
         i.compress_type = self.z.compression  # pytype: disable=attribute-error
         # unzip will not honor unix file modes unless file creator is
         # set to unix (id 3).
         i.create_system = 3
         ftype = _UNX_IFREG
         if islink:
             mode = 0o777
             ftype = _UNX_IFLNK
         i.external_attr = (mode | ftype) << 16
         # add "extended-timestamp" extra block, because zip archives
         # without this will be extracted with unexpected timestamp,
         # if TZ is not configured as GMT
         i.extra += struct.pack(
             b'<hhBl',
             0x5455,  # block type: "extended-timestamp"
             1 + 4,  # size of this block
             1,  # "modification time is present"
             int(self.mtime),
         )  # last modification (UTC)
         self.z.writestr(i, data)
 
     def done(self):
         self.z.close()
 
 
 class fileit(object):
     '''write archive as files in directory.'''
 
     def __init__(self, name, mtime):
         self.basedir = name
         self.opener = vfsmod.vfs(self.basedir)
         self.mtime = mtime
 
     def addfile(self, name, mode, islink, data):
         if islink:
             self.opener.symlink(data, name)
             return
         f = self.opener(name, b"w", atomictemp=False)
         f.write(data)
         f.close()
         destfile = os.path.join(self.basedir, name)
         os.chmod(destfile, mode)
         if self.mtime is not None:
             os.utime(destfile, (self.mtime, self.mtime))
 
     def done(self):
         pass
 
 
 archivers = {
     b'files': fileit,
     b'tar': tarit,
     b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
     b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
     b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
     b'uzip': lambda name, mtime: zipit(name, mtime, False),
     b'zip': zipit,
 }
 
 
 def archive(
     repo,
     dest,
     node,
     kind,
     decode=True,
     match=None,
     prefix=b'',
     mtime=None,
     subrepos=False,
 ):
     """create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
 
     kind is type of archive to create.
 
     decode tells whether to put files through decode filters from
     hgrc.
 
     match is a matcher to filter names of files to write to archive.
 
     prefix is name of path to put before every archive member.
 
     mtime is the modified time, in seconds, or None to use the changeset time.
 
     subrepos tells whether to include subrepos.
     """
 
     if kind == b'txz' and not pycompat.ispy3:
         raise error.Abort(_(b'xz compression is only available in Python 3'))
 
     if kind == b'files':
         if prefix:
             raise error.Abort(_(b'cannot give prefix when archiving to files'))
     else:
         prefix = tidyprefix(dest, kind, prefix)
 
     def write(name, mode, islink, getdata):
         data = getdata()
         if decode:
             data = repo.wwritedata(name, data)
         archiver.addfile(prefix + name, mode, islink, data)
 
     if kind not in archivers:
         raise error.Abort(_(b"unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
 
     if not match:
         match = scmutil.matchall(repo)
 
     if repo.ui.configbool(b"ui", b"archivemeta"):
         name = b'.hg_archival.txt'
         if match(name):
             write(name, 0o644, False, lambda: buildmetadata(ctx))
 
     files = list(ctx.manifest().walk(match))
     total = len(files)
     if total:
         files.sort()
         scmutil.prefetchfiles(
             repo, [(ctx.rev(), scmutil.matchfiles(repo, files))]
         )
         progress = repo.ui.makeprogress(
             _(b'archiving'), unit=_(b'files'), total=total
         )
         progress.update(0)
         for f in files:
             ff = ctx.flags(f)
             write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
             progress.increment(item=f)
         progress.complete()
 
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
             submatch = matchmod.subdirmatcher(subpath, match)
             subprefix = prefix + subpath + b'/'
             total += sub.archive(archiver, subprefix, submatch, decode)
 
     if total == 0:
         raise error.Abort(_(b'no files match the archive pattern'))
 
     archiver.done()
     return total