@@ -1,65 +1,68 @@
 """
 Un-targz and re-targz a targz file to ensure a reproducible build.
 
 usage:
 
 $ export SOURCE_DATE_EPOCH=$(date +%s)
 ...
 $ python retar.py <tarfile.gz>
 
 The process of creating an sdist can be non-reproducible:
   - directories created during the process get an mtime of the creation date;
   - gzipping files embeds the timestamp of the gzip creation.
 
 This will untar and retar, ensuring that any mtime > SOURCE_DATE_EPOCH is set
 equal to SOURCE_DATE_EPOCH.
 
 """
 
 import tarfile
 import sys
 import os
 import gzip
 import io
 
+from pathlib import Path
+
 if len(sys.argv) > 2:
     raise ValueError("Too many arguments")
 
 
 timestamp = int(os.environ["SOURCE_DATE_EPOCH"])
 
+path = Path(sys.argv[1])
 old_buf = io.BytesIO()
-with open(sys.argv[1], "rb") as f:
+with open(path, "rb") as f:
     old_buf.write(f.read())
 old_buf.seek(0)
 old = tarfile.open(fileobj=old_buf, mode="r:gz")
 
 buf = io.BytesIO()
 new = tarfile.open(fileobj=buf, mode="w", format=tarfile.GNU_FORMAT)
 for i, m in enumerate(old):
     data = None
     # mutation does not work, copy
     if m.name.endswith('.DS_Store'):
         continue
     m2 = tarfile.TarInfo(m.name)
     m2.mtime = min(timestamp, m.mtime)
     m2.size = m.size
     m2.type = m.type
     m2.linkname = m.linkname
     m2.mode = m.mode
     if m.isdir():
         new.addfile(m2)
     else:
         data = old.extractfile(m)
         new.addfile(m2, data)
 new.close()
 old.close()
 
 buf.seek(0)
-with open(sys.argv[1], "wb") as f:
+with open(path, "wb") as f:
     with gzip.GzipFile('', "wb", fileobj=f, mtime=timestamp) as gzf:
         gzf.write(buf.read())
 
 # checks the archive is valid.
-archive = tarfile.open(sys.argv[1])
+archive = tarfile.open(path)
 names = archive.getnames()
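
The docstring's second bullet is the subtle part of this script: gzip writes the current time into the header of the compressed stream, so even byte-identical tar data produces different .gz files on different runs. A minimal sketch of that effect, and of why the script passes mtime=timestamp to gzip.GzipFile (the gz_bytes helper below is illustrative only, not part of retar.py):

import gzip
import io
import time

def gz_bytes(payload: bytes, mtime=None) -> bytes:
    # Compress in memory; with mtime=None, gzip stamps the current time into the header.
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="wb", mtime=mtime) as gzf:
        gzf.write(payload)
    return buf.getvalue()

data = b"identical tar bytes"

a = gz_bytes(data)
time.sleep(1)
b = gz_bytes(data)
print(a == b)  # usually False: only the 4-byte MTIME field in the gzip header differs

# Pinning the header timestamp (retar.py pins it to SOURCE_DATE_EPOCH) makes the output stable.
print(gz_bytes(data, mtime=0) == gz_bytes(data, mtime=0))  # True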
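A small way to check the end-to-end behaviour: with a fixed SOURCE_DATE_EPOCH, running retar.py over the same archive twice should leave it byte-for-byte identical, since the second pass clamps mtimes that are already clamped and rewrites the same gzip header. This is a sketch only; the archive path and the epoch value below are hypothetical examples:

import hashlib
import os
import subprocess
import sys
from pathlib import Path

archive = Path("dist/example-1.0.tar.gz")  # hypothetical sdist path, adjust as needed
env = {**os.environ, "SOURCE_DATE_EPOCH": "1577836800"}  # any fixed timestamp

def sha256(p: Path) -> str:
    return hashlib.sha256(p.read_bytes()).hexdigest()

# The first pass normalizes member mtimes and the gzip header;
# a second pass over the already-normalized archive should be a no-op.
subprocess.run([sys.executable, "retar.py", str(archive)], check=True, env=env)
first = sha256(archive)
subprocess.run([sys.executable, "retar.py", str(archive)], check=True, env=env)
second = sha256(archive)
print("idempotent:", first == second)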