@@ -1,187 +1,187 @@
 # Copyright (C) 2015 - Mike Edgar <adgar@google.com>
 #
 # This extension enables removal of file content at a given revision,
 # rewriting the data/metadata of successive revisions to preserve revision log
 # integrity.

 """erase file content at a given revision

 The censor command instructs Mercurial to erase all content of a file at a given
 revision *without updating the changeset hash.* This allows existing history to
 remain valid while preventing future clones/pulls from receiving the erased
 data.

 Typical uses for censor are due to security or legal requirements, including::

  * Passwords, private keys, cryptographic material
  * Licensed data/code/libraries for which the license has expired
  * Personally Identifiable Information or other private data

 Censored nodes can interrupt mercurial's typical operation whenever the excised
 data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
 simply fail when asked to produce censored data. Others, like ``hg verify`` and
 ``hg update``, must be capable of tolerating censored data to continue to
 function in a meaningful way. Such commands only tolerate censored file
 revisions if they are allowed by the "censor.policy=ignore" config option.
 """

 from __future__ import absolute_import

 from mercurial.i18n import _
 from mercurial.node import short

 from mercurial import (
     error,
     pycompat,
     registrar,
     revlog,
     scmutil,
     util,
 )

 cmdtable = {}
 command = registrar.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'

 @command('censor',
     [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
      ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
     _('-r REV [-t TEXT] [FILE]'))
 def censor(ui, repo, path, rev='', tombstone='', **opts):
     with repo.wlock(), repo.lock():
         return _docensor(ui, repo, path, rev, tombstone, **opts)

 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
     if not path:
         raise error.Abort(_('must specify file path to censor'))
     if not rev:
         raise error.Abort(_('must specify revision to censor'))

     wctx = repo[None]

     m = scmutil.match(wctx, (path,))
     if m.anypats() or len(m.files()) != 1:
         raise error.Abort(_('can only specify an explicit filename'))
     path = m.files()[0]
     flog = repo.file(path)
     if not len(flog):
         raise error.Abort(_('cannot censor file with no history'))

     rev = scmutil.revsingle(repo, rev, rev).rev()
     try:
         ctx = repo[rev]
     except KeyError:
         raise error.Abort(_('invalid revision identifier %s') % rev)

     try:
         fctx = ctx.filectx(path)
     except error.LookupError:
         raise error.Abort(_('file does not exist at revision %s') % rev)

     fnode = fctx.filenode()
     heads = []
     for headnode in repo.heads():
-        c = repo[headnode]
-        if path in c and c.filenode(path) == fnode:
-            heads.append(c)
+        hc = repo[headnode]
+        if path in hc and hc.filenode(path) == fnode:
+            heads.append(hc)
     if heads:
         headlist = ', '.join([short(c.node()) for c in heads])
         raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
                           hint=_('clean/delete and commit first'))

     wp = wctx.parents()
     if ctx.node() in [p.node() for p in wp]:
         raise error.Abort(_('cannot censor working directory'),
                           hint=_('clean/delete/update first'))

     flogv = flog.version & 0xFFFF
     if flogv != revlog.REVLOGV1:
         raise error.Abort(
             _('censor does not support revlog version %d') % (flogv,))

     tombstone = revlog.packmeta({"censored": tombstone}, "")

     crev = fctx.filerev()

     if len(tombstone) > flog.rawsize(crev):
         raise error.Abort(_(
             'censor tombstone must be no longer than censored data'))

     # Using two files instead of one makes it easy to rewrite entry-by-entry
     idxread = repo.svfs(flog.indexfile, 'r')
     idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
     if flog.version & revlog.FLAG_INLINE_DATA:
         dataread, datawrite = idxread, idxwrite
     else:
         dataread = repo.svfs(flog.datafile, 'r')
         datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)

     # Copy all revlog data up to the entry to be censored.
     rio = revlog.revlogio()
     offset = flog.start(crev)

     for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
         idxwrite.write(chunk)
     for chunk in util.filechunkiter(dataread, limit=offset):
         datawrite.write(chunk)

     def rewriteindex(r, newoffs, newdata=None):
         """Rewrite the index entry with a new data offset and optional new data.

         The newdata argument, if given, is a tuple of three positive integers:
         (new compressed, new uncompressed, added flag bits).
         """
         offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
         flags = revlog.gettype(offlags)
         if newdata:
             comp, uncomp, nflags = newdata
             flags |= nflags
         offlags = revlog.offset_type(newoffs, flags)
         e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
         idxwrite.write(rio.packentry(e, None, flog.version, r))
         idxread.seek(rio.size, 1)

     def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
         """Write the given full text to the filelog with the given data offset.

         Returns:
             The integer number of data bytes written, for tracking data offsets.
         """
         flag, compdata = flog.compress(data)
         newcomp = len(flag) + len(compdata)
         rewriteindex(r, offs, (newcomp, len(data), nflags))
         datawrite.write(flag)
         datawrite.write(compdata)
         dataread.seek(flog.length(r), 1)
         return newcomp

     # Rewrite censored revlog entry with (padded) tombstone data.
     pad = ' ' * (flog.rawsize(crev) - len(tombstone))
     offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)

     # Rewrite all following filelog revisions fixing up offsets and deltas.
     for srev in pycompat.xrange(crev + 1, len(flog)):
         if crev in flog.parentrevs(srev):
             # Immediate children of censored node must be re-added as fulltext.
             try:
                 revdata = flog.revision(srev)
             except error.CensoredNodeError as e:
                 revdata = e.tombstone
             dlen = rewrite(srev, offset, revdata)
         else:
             # Copy any other revision data verbatim after fixing up the offset.
             rewriteindex(srev, offset)
             dlen = flog.length(srev)
             for chunk in util.filechunkiter(dataread, limit=dlen):
                 datawrite.write(chunk)
         offset += dlen

     idxread.close()
     idxwrite.close()
     if dataread is not idxread:
         dataread.close()
     datawrite.close()
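
For orientation: the only functional change in this diff is the head-check loop in _docensor, where the loop variable `c` is renamed to `hc`; every other line is context.

A note on the tombstone handling in the unchanged code above: the text given with `-t TEXT` is wrapped by `revlog.packmeta({"censored": ...}, "")` into the filelog metadata envelope, padded with spaces up to the original raw size, and rewritten as the censored revision's data, which is why it must be no longer than the data it replaces. The sketch below is an illustration only; it assumes the conventional "\x01\n"-delimited metadata framing, and both helper names are hypothetical rather than part of the extension.

# Illustration only (not part of censor.py): why the tombstone has a size limit.
def pack_censor_tombstone(text):
    # Filelog metadata is "key: value\n" pairs wrapped in \x01\n markers,
    # prepended to the (empty) replacement revision text.
    return "\x01\ncensored: %s\n\x01\n" % text

def fits_in_place(tombstone_text, rawsize):
    # The rewritten entry is padded with spaces up to the original raw size,
    # so the packed tombstone may not exceed the data it replaces.
    return len(pack_censor_tombstone(tombstone_text)) <= rawsize

large = fits_in_place("removed for legal reasons", 4096)  # True: tombstone fits
tiny = fits_in_place("removed for legal reasons", 8)      # False: original data too small
print(large, tiny)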
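
The `rewriteindex` helper repacks the first index field with `revlog.offset_type`, which stores the data offset and the 16-bit flag word (including the censored flag added via `revlog.REVIDX_ISCENSORED`) in a single integer. Below is a minimal standalone sketch of that packing, assuming the usual offset-in-the-high-bits layout; the flag constant's value here is an assumption, the extension imports it from `revlog`.

# Illustration only: the offset/flags packing that rewriteindex() relies on.
REVIDX_ISCENSORED = 1 << 15  # assumed value; censor.py uses revlog.REVIDX_ISCENSORED

def offset_type(offset, flags):
    # Mirrors revlog.offset_type: data offset in the high bits, flags below.
    return (offset << 16) | flags

def getoffset(offlags):
    return offlags >> 16

def gettype(offlags):
    return offlags & 0xFFFF

packed = offset_type(12345, REVIDX_ISCENSORED)
assert getoffset(packed) == 12345
assert gettype(packed) & REVIDX_ISCENSORED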
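
Two details of the rewrite loop are easy to miss: revisions whose parent is the censored one are re-stored as full text, because their stored deltas were computed against data that no longer exists, and every later index entry is rewritten with a shifted start offset, because the rewritten entries can change in compressed size. The sketch below is a simplified, hypothetical model of that offset bookkeeping in which only the censored entry changes size; in censor.py the running `offset` is threaded through rewrite()/rewriteindex() against the live revlog files.

# Simplified illustration of the offset fix-up after an entry is rewritten.
def shifted_offsets(lengths, crev, new_len):
    """Recompute start offsets after revision `crev`'s stored length changes
    from lengths[crev] to new_len."""
    offsets, pos = [], 0
    for r, length in enumerate(lengths):
        offsets.append(pos)
        pos += new_len if r == crev else length
    return offsets

# Revision 1 is censored; its stored chunk shrinks from 40 to 30 bytes, so
# revisions 2 and 3 start 10 bytes earlier than before.
print(shifted_offsets([10, 40, 25, 60], crev=1, new_len=30))  # [0, 10, 40, 65]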