@@ -1,184 +1,187 @@
 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
 #
 # This extension enables removal of file content at a given revision,
 # rewriting the data/metadata of successive revisions to preserve revision log
 # integrity.

 """erase file content at a given revision

 The censor command instructs Mercurial to erase all content of a file at a given
 revision *without updating the changeset hash.* This allows existing history to
 remain valid while preventing future clones/pulls from receiving the erased
 data.

 Typical uses for censor are due to security or legal requirements, including::

  * Passwords, private keys, cryptographic material
  * Licensed data/code/libraries for which the license has expired
  * Personally Identifiable Information or other private data

 Censored nodes can interrupt mercurial's typical operation whenever the excised
 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
 simply fail when asked to produce censored data. Others, like ``hg verify`` and
 ``hg update``, must be capable of tolerating censored data to continue to
 function in a meaningful way. Such commands only tolerate censored file
 revisions if they are allowed by the "censor.policy=ignore" config option.
 """

 from __future__ import absolute_import

 from mercurial.i18n import _
 from mercurial.node import short

 from mercurial import (
     error,
     pycompat,
     registrar,
     revlog,
     scmutil,
     util,
 )

 cmdtable = {}
 command = registrar.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'

 @command('censor',
     [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
      ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
     _('-r REV [-t TEXT] [FILE]'))
 def censor(ui, repo, path, rev='', tombstone='', **opts):
     with repo.wlock(), repo.lock():
         return _docensor(ui, repo, path, rev, tombstone, **opts)

 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
     if not path:
         raise error.Abort(_('must specify file path to censor'))
     if not rev:
         raise error.Abort(_('must specify revision to censor'))

     wctx = repo[None]

     m = scmutil.match(wctx, (path,))
     if m.anypats() or len(m.files()) != 1:
         raise error.Abort(_('can only specify an explicit filename'))
     path = m.files()[0]
     flog = repo.file(path)
     if not len(flog):
         raise error.Abort(_('cannot censor file with no history'))

     rev = scmutil.revsingle(repo, rev, rev).rev()
     try:
         ctx = repo[rev]
     except KeyError:
         raise error.Abort(_('invalid revision identifier %s') % rev)

     try:
         fctx = ctx.filectx(path)
     except error.LookupError:
         raise error.Abort(_('file does not exist at revision %s') % rev)

     fnode = fctx.filenode()
-    headctxs = [repo[c] for c in repo.heads()]
-    heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
+    heads = []
+    for headnode in repo.heads():
+        c = repo[headnode]
+        if path in c and c.filenode(path) == fnode:
+            heads.append(c)
     if heads:
         headlist = ', '.join([short(c.node()) for c in heads])
         raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
                           hint=_('clean/delete and commit first'))

     wp = wctx.parents()
     if ctx.node() in [p.node() for p in wp]:
         raise error.Abort(_('cannot censor working directory'),
                           hint=_('clean/delete/update first'))

     flogv = flog.version & 0xFFFF
     if flogv != revlog.REVLOGV1:
         raise error.Abort(
             _('censor does not support revlog version %d') % (flogv,))

     tombstone = revlog.packmeta({"censored": tombstone}, "")

     crev = fctx.filerev()

     if len(tombstone) > flog.rawsize(crev):
         raise error.Abort(_(
             'censor tombstone must be no longer than censored data'))

     # Using two files instead of one makes it easy to rewrite entry-by-entry
     idxread = repo.svfs(flog.indexfile, 'r')
     idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
     if flog.version & revlog.FLAG_INLINE_DATA:
         dataread, datawrite = idxread, idxwrite
     else:
         dataread = repo.svfs(flog.datafile, 'r')
         datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)

     # Copy all revlog data up to the entry to be censored.
     rio = revlog.revlogio()
     offset = flog.start(crev)

     for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
         idxwrite.write(chunk)
     for chunk in util.filechunkiter(dataread, limit=offset):
         datawrite.write(chunk)

     def rewriteindex(r, newoffs, newdata=None):
         """Rewrite the index entry with a new data offset and optional new data.

         The newdata argument, if given, is a tuple of three positive integers:
         (new compressed, new uncompressed, added flag bits).
         """
         offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
         flags = revlog.gettype(offlags)
         if newdata:
             comp, uncomp, nflags = newdata
             flags |= nflags
         offlags = revlog.offset_type(newoffs, flags)
         e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
         idxwrite.write(rio.packentry(e, None, flog.version, r))
         idxread.seek(rio.size, 1)

     def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
         """Write the given full text to the filelog with the given data offset.

         Returns:
             The integer number of data bytes written, for tracking data offsets.
         """
         flag, compdata = flog.compress(data)
         newcomp = len(flag) + len(compdata)
         rewriteindex(r, offs, (newcomp, len(data), nflags))
         datawrite.write(flag)
         datawrite.write(compdata)
         dataread.seek(flog.length(r), 1)
         return newcomp

     # Rewrite censored revlog entry with (padded) tombstone data.
     pad = ' ' * (flog.rawsize(crev) - len(tombstone))
     offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)

     # Rewrite all following filelog revisions fixing up offsets and deltas.
     for srev in pycompat.xrange(crev + 1, len(flog)):
         if crev in flog.parentrevs(srev):
             # Immediate children of censored node must be re-added as fulltext.
             try:
                 revdata = flog.revision(srev)
             except error.CensoredNodeError as e:
                 revdata = e.tombstone
             dlen = rewrite(srev, offset, revdata)
         else:
             # Copy any other revision data verbatim after fixing up the offset.
             rewriteindex(srev, offset)
             dlen = flog.length(srev)
             for chunk in util.filechunkiter(dataread, limit=dlen):
                 datawrite.write(chunk)
         offset += dlen

     idxread.close()
     idxwrite.close()
     if dataread is not idxread:
         dataread.close()
     datawrite.close()
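
For readers reviewing this file in isolation, a minimal example of driving the ``censor`` command declared above might look like the following; the revision number, tombstone text, and filename are illustrative placeholders, not part of this changeset:

    $ hg censor -r 42 -t "removed leaked credentials" secrets.txt
    $ hg --config censor.policy=ignore verify

The second command uses the ``censor.policy=ignore`` option mentioned in the module docstring, which lets commands that must read the censored revision (such as ``hg verify``) tolerate it rather than abort.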
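
The tombstone handling in ``_docensor`` rests on a size invariant: the packed tombstone may not be longer than the raw data it replaces, and it is padded with spaces so the rewritten entry keeps exactly the original raw size. A standalone sketch of that arithmetic, with made-up sizes and no Mercurial APIs involved:

    # Hypothetical raw size of the censored file revision (stands in for
    # flog.rawsize(crev)); the tombstone text is likewise made up.
    original_rawsize = 512
    tombstone = b"censored: removed leaked credentials"
    assert len(tombstone) <= original_rawsize   # otherwise censor aborts
    pad = b' ' * (original_rawsize - len(tombstone))
    assert len(tombstone + pad) == original_rawsize  # raw size is preserved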