@@ -1,377 +1,378 @@
1 | 1 | # debugcommands.py - debug logic for remotefilelog |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2013 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | from __future__ import absolute_import |
|
8 | 8 | |
|
9 | 9 | import hashlib |
|
10 | 10 | import os |
|
11 | 11 | import zlib |
|
12 | 12 | |
|
13 | 13 | from mercurial.node import bin, hex, nullid, short |
|
14 | 14 | from mercurial.i18n import _ |
|
15 | 15 | from mercurial import ( |
|
16 | 16 | error, |
|
17 | 17 | filelog, |
|
18 | 18 | node as nodemod, |
|
19 | pycompat, | |
|
19 | 20 | revlog, |
|
20 | 21 | ) |
|
21 | 22 | from . import ( |
|
22 | 23 | constants, |
|
23 | 24 | datapack, |
|
24 | 25 | extutil, |
|
25 | 26 | fileserverclient, |
|
26 | 27 | historypack, |
|
27 | 28 | repack, |
|
28 | 29 | shallowutil, |
|
29 | 30 | ) |
|
30 | 31 | |
|
31 | 32 | def debugremotefilelog(ui, path, **opts): |
|
32 | 33 | decompress = opts.get(r'decompress') |
|
33 | 34 | |
|
34 | 35 | size, firstnode, mapping = parsefileblob(path, decompress) |
|
35 | 36 | |
|
36 | 37 | ui.status(_("size: %d bytes\n") % (size)) |
|
37 | 38 | ui.status(_("path: %s \n") % (path)) |
|
38 | 39 | ui.status(_("key: %s \n") % (short(firstnode))) |
|
39 | 40 | ui.status(_("\n")) |
|
40 | 41 | ui.status(_("%12s => %12s %13s %13s %12s\n") % |
|
41 | 42 | ("node", "p1", "p2", "linknode", "copyfrom")) |
|
42 | 43 | |
|
43 | 44 | queue = [firstnode] |
|
44 | 45 | while queue: |
|
45 | 46 | node = queue.pop(0) |
|
46 | 47 | p1, p2, linknode, copyfrom = mapping[node] |
|
47 | 48 | ui.status(_("%s => %s %s %s %s\n") % |
|
48 | 49 | (short(node), short(p1), short(p2), short(linknode), copyfrom)) |
|
49 | 50 | if p1 != nullid: |
|
50 | 51 | queue.append(p1) |
|
51 | 52 | if p2 != nullid: |
|
52 | 53 | queue.append(p2) |
|
53 | 54 | |
|
54 | 55 | def buildtemprevlog(repo, file): |
|
55 | 56 | # get filename key |
|
56 | 57 | filekey = nodemod.hex(hashlib.sha1(file).digest()) |
|
57 | 58 | filedir = os.path.join(repo.path, 'store/data', filekey) |
|
58 | 59 | |
|
59 | 60 | # sort all entries based on linkrev |
|
60 | 61 | fctxs = [] |
|
61 | 62 | for filenode in os.listdir(filedir): |
|
62 | 63 | if '_old' not in filenode: |
|
63 | 64 | fctxs.append(repo.filectx(file, fileid=bin(filenode))) |
|
64 | 65 | |
|
65 | 66 | fctxs = sorted(fctxs, key=lambda x: x.linkrev()) |
|
66 | 67 | |
|
67 | 68 | # add to revlog |
|
68 | 69 | temppath = repo.sjoin('data/temprevlog.i') |
|
69 | 70 | if os.path.exists(temppath): |
|
70 | 71 | os.remove(temppath) |
|
71 | 72 | r = filelog.filelog(repo.svfs, 'temprevlog') |
|
72 | 73 | |
|
73 | 74 | class faket(object): |
|
74 | 75 | def add(self, a, b, c): |
|
75 | 76 | pass |
|
76 | 77 | t = faket() |
|
77 | 78 | for fctx in fctxs: |
|
78 | 79 | if fctx.node() not in repo: |
|
79 | 80 | continue |
|
80 | 81 | |
|
81 | 82 | p = fctx.filelog().parents(fctx.filenode()) |
|
82 | 83 | meta = {} |
|
83 | 84 | if fctx.renamed(): |
|
84 | 85 | meta['copy'] = fctx.renamed()[0] |
|
85 | 86 | meta['copyrev'] = hex(fctx.renamed()[1]) |
|
86 | 87 | |
|
87 | 88 | r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1]) |
|
88 | 89 | |
|
89 | 90 | return r |
|
90 | 91 | |
|
91 | 92 | def debugindex(orig, ui, repo, file_=None, **opts): |
|
92 | 93 | """dump the contents of an index file""" |
|
93 | 94 | if (opts.get(r'changelog') or |
|
94 | 95 | opts.get(r'manifest') or |
|
95 | 96 | opts.get(r'dir') or |
|
96 | 97 | not shallowutil.isenabled(repo) or |
|
97 | 98 | not repo.shallowmatch(file_)): |
|
98 | 99 | return orig(ui, repo, file_, **opts) |
|
99 | 100 | |
|
100 | 101 | r = buildtemprevlog(repo, file_) |
|
101 | 102 | |
|
102 | 103 | # debugindex like normal |
|
103 | 104 | format = opts.get('format', 0) |
|
104 | 105 | if format not in (0, 1): |
|
105 | 106 | raise error.Abort(_("unknown format %d") % format) |
|
106 | 107 | |
|
107 | 108 | generaldelta = r.version & revlog.FLAG_GENERALDELTA |
|
108 | 109 | if generaldelta: |
|
109 | 110 | basehdr = ' delta' |
|
110 | 111 | else: |
|
111 | 112 | basehdr = ' base' |
|
112 | 113 | |
|
113 | 114 | if format == 0: |
|
114 | 115 | ui.write((" rev offset length " + basehdr + " linkrev" |
|
115 | 116 | " nodeid p1 p2\n")) |
|
116 | 117 | elif format == 1: |
|
117 | 118 | ui.write((" rev flag offset length" |
|
118 | 119 | " size " + basehdr + " link p1 p2" |
|
119 | 120 | " nodeid\n")) |
|
120 | 121 | |
|
121 | 122 | for i in r: |
|
122 | 123 | node = r.node(i) |
|
123 | 124 | if generaldelta: |
|
124 | 125 | base = r.deltaparent(i) |
|
125 | 126 | else: |
|
126 | 127 | base = r.chainbase(i) |
|
127 | 128 | if format == 0: |
|
128 | 129 | try: |
|
129 | 130 | pp = r.parents(node) |
|
130 | 131 | except Exception: |
|
131 | 132 | pp = [nullid, nullid] |
|
132 | 133 | ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( |
|
133 | 134 | i, r.start(i), r.length(i), base, r.linkrev(i), |
|
134 | 135 | short(node), short(pp[0]), short(pp[1]))) |
|
135 | 136 | elif format == 1: |
|
136 | 137 | pr = r.parentrevs(i) |
|
137 | 138 | ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % ( |
|
138 | 139 | i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), |
|
139 | 140 | base, r.linkrev(i), pr[0], pr[1], short(node))) |
|
140 | 141 | |
|
141 | 142 | def debugindexdot(orig, ui, repo, file_): |
|
142 | 143 | """dump an index DAG as a graphviz dot file""" |
|
143 | 144 | if not shallowutil.isenabled(repo): |
|
144 | 145 | return orig(ui, repo, file_) |
|
145 | 146 | |
|
146 | 147 | r = buildtemprevlog(repo, os.path.basename(file_)[:-2]) |
|
147 | 148 | |
|
148 | 149 | ui.write(("digraph G {\n")) |
|
149 | 150 | for i in r: |
|
150 | 151 | node = r.node(i) |
|
151 | 152 | pp = r.parents(node) |
|
152 | 153 | ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) |
|
153 | 154 | if pp[1] != nullid: |
|
154 | 155 | ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) |
|
155 | 156 | ui.write("}\n") |
|
156 | 157 | |
|
157 | 158 | def verifyremotefilelog(ui, path, **opts): |
|
158 | 159 | decompress = opts.get(r'decompress') |
|
159 | 160 | |
|
160 | 161 | for root, dirs, files in os.walk(path): |
|
161 | 162 | for file in files: |
|
162 | 163 | if file == "repos": |
|
163 | 164 | continue |
|
164 | 165 | filepath = os.path.join(root, file) |
|
165 | 166 | size, firstnode, mapping = parsefileblob(filepath, decompress) |
|
166 | 167 | for p1, p2, linknode, copyfrom in mapping.itervalues(): |
|
167 | 168 | if linknode == nullid: |
|
168 | 169 | actualpath = os.path.relpath(root, path) |
|
169 | 170 | key = fileserverclient.getcachekey("reponame", actualpath, |
|
170 | 171 | file) |
|
171 | 172 | ui.status("%s %s\n" % (key, os.path.relpath(filepath, |
|
172 | 173 | path))) |
|
173 | 174 | |
|
174 | 175 | def _decompressblob(raw): |
|
175 | 176 | return zlib.decompress(raw) |
|
176 | 177 | |
|
177 | 178 | def parsefileblob(path, decompress): |
|
178 | 179 | f = open(path, "rb") |
|
179 | 180 | try: |
|
180 | 181 | raw = f.read() |
|
181 | 182 | finally: |
|
182 | 183 | f.close() |
|
183 | 184 | |
|
184 | 185 | if decompress: |
|
185 | 186 | raw = _decompressblob(raw) |
|
186 | 187 | |
|
187 | 188 | offset, size, flags = shallowutil.parsesizeflags(raw) |
|
188 | 189 | start = offset + size |
|
189 | 190 | |
|
190 | 191 | firstnode = None |
|
191 | 192 | |
|
192 | 193 | mapping = {} |
|
193 | 194 | while start < len(raw): |
|
194 | 195 | divider = raw.index('\0', start + 80) |
|
195 | 196 | |
|
196 | 197 | currentnode = raw[start:(start + 20)] |
|
197 | 198 | if not firstnode: |
|
198 | 199 | firstnode = currentnode |
|
199 | 200 | |
|
200 | 201 | p1 = raw[(start + 20):(start + 40)] |
|
201 | 202 | p2 = raw[(start + 40):(start + 60)] |
|
202 | 203 | linknode = raw[(start + 60):(start + 80)] |
|
203 | 204 | copyfrom = raw[(start + 80):divider] |
|
204 | 205 | |
|
205 | 206 | mapping[currentnode] = (p1, p2, linknode, copyfrom) |
|
206 | 207 | start = divider + 1 |
|
207 | 208 | |
|
208 | 209 | return size, firstnode, mapping |
|
209 | 210 | |
|
210 | 211 | def debugdatapack(ui, *paths, **opts): |
|
211 | 212 | for path in paths: |
|
212 | 213 | if '.data' in path: |
|
213 | 214 | path = path[:path.index('.data')] |
|
214 | 215 | ui.write("%s:\n" % path) |
|
215 | 216 | dpack = datapack.datapack(path) |
|
216 | 217 | node = opts.get(r'node') |
|
217 | 218 | if node: |
|
218 | 219 | deltachain = dpack.getdeltachain('', bin(node)) |
|
219 | 220 | dumpdeltachain(ui, deltachain, **opts) |
|
220 | 221 | return |
|
221 | 222 | |
|
222 | 223 | if opts.get(r'long'): |
|
223 | 224 | hashformatter = hex |
|
224 | 225 | hashlen = 42 |
|
225 | 226 | else: |
|
226 | 227 | hashformatter = short |
|
227 | 228 | hashlen = 14 |
|
228 | 229 | |
|
229 | 230 | lastfilename = None |
|
230 | 231 | totaldeltasize = 0 |
|
231 | 232 | totalblobsize = 0 |
|
232 | 233 | def printtotals(): |
|
233 | 234 | if lastfilename is not None: |
|
234 | 235 | ui.write("\n") |
|
235 | 236 | if not totaldeltasize or not totalblobsize: |
|
236 | 237 | return |
|
237 | 238 | difference = totalblobsize - totaldeltasize |
|
238 | 239 | deltastr = "%0.1f%% %s" % ( |
|
239 | 240 | (100.0 * abs(difference) / totalblobsize), |
|
240 | 241 | ("smaller" if difference > 0 else "bigger")) |
|
241 | 242 | |
|
242 | 243 | ui.write(("Total:%s%s %s (%s)\n") % ( |
|
243 | 244 | "".ljust(2 * hashlen - len("Total:")), |
|
244 | 245 | ('%d' % totaldeltasize).ljust(12), |
|
245 | 246 | ('%d' % totalblobsize).ljust(9), |
|
246 | 247 | deltastr |
|
247 | 248 | )) |
|
248 | 249 | |
|
249 | 250 | bases = {} |
|
250 | 251 | nodes = set() |
|
251 | 252 | failures = 0 |
|
252 | 253 | for filename, node, deltabase, deltalen in dpack.iterentries(): |
|
253 | 254 | bases[node] = deltabase |
|
254 | 255 | if node in nodes: |
|
255 | 256 | ui.write(("Bad entry: %s appears twice\n" % short(node))) |
|
256 | 257 | failures += 1 |
|
257 | 258 | nodes.add(node) |
|
258 | 259 | if filename != lastfilename: |
|
259 | 260 | printtotals() |
|
260 | 261 | name = '(empty name)' if filename == '' else filename |
|
261 | 262 | ui.write("%s:\n" % name) |
|
262 | 263 | ui.write("%s%s%s%s\n" % ( |
|
263 | 264 | "Node".ljust(hashlen), |
|
264 | 265 | "Delta Base".ljust(hashlen), |
|
265 | 266 | "Delta Length".ljust(14), |
|
266 | 267 | "Blob Size".ljust(9))) |
|
267 | 268 | lastfilename = filename |
|
268 | 269 | totalblobsize = 0 |
|
269 | 270 | totaldeltasize = 0 |
|
270 | 271 | |
|
271 | 272 | # Metadata could be missing, in which case it will be an empty dict. |
|
272 | 273 | meta = dpack.getmeta(filename, node) |
|
273 | 274 | if constants.METAKEYSIZE in meta: |
|
274 | 275 | blobsize = meta[constants.METAKEYSIZE] |
|
275 | 276 | totaldeltasize += deltalen |
|
276 | 277 | totalblobsize += blobsize |
|
277 | 278 | else: |
|
278 | 279 | blobsize = "(missing)" |
|
279 | ui.write("%s %s %s%s\n" % ( | 

280 | ui.write("%s %s %s%s\n" % ( | 
|
280 | 281 | hashformatter(node), |
|
281 | 282 | hashformatter(deltabase), |
|
282 | 283 | ('%d' % deltalen).ljust(14), |
|
283 | blobsize)) | |
|
284 | pycompat.bytestr(blobsize))) | |
|
284 | 285 | |
|
285 | 286 | if filename is not None: |
|
286 | 287 | printtotals() |
|
287 | 288 | |
|
288 | 289 | failures += _sanitycheck(ui, set(nodes), bases) |
|
289 | 290 | if failures > 1: |
|
290 | 291 | ui.warn(("%d failures\n" % failures)) |
|
291 | 292 | return 1 |
|
292 | 293 | |
|
293 | 294 | def _sanitycheck(ui, nodes, bases): |
|
294 | 295 | """ |
|
294 | 295 | Does some basic sanity checking on a packfile with ``nodes`` and ``bases`` (a 
|
296 | 297 | mapping of node->base): |
|
297 | 298 | |
|
298 | 299 | - Each deltabase must itself be a node elsewhere in the pack |
|
299 | 300 | - There must be no cycles |
|
300 | 301 | """ |
|
301 | 302 | failures = 0 |
|
302 | 303 | for node in nodes: |
|
303 | 304 | seen = set() |
|
304 | 305 | current = node |
|
305 | 306 | deltabase = bases[current] |
|
306 | 307 | |
|
307 | 308 | while deltabase != nullid: |
|
308 | 309 | if deltabase not in nodes: |
|
309 | 310 | ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" % |
|
310 | 311 | (short(node), short(deltabase)))) |
|
311 | 312 | failures += 1 |
|
312 | 313 | break |
|
313 | 314 | |
|
314 | 315 | if deltabase in seen: |
|
315 | 316 | ui.warn(("Bad entry: %s has a cycle (at %s)\n" % |
|
316 | 317 | (short(node), short(deltabase)))) |
|
317 | 318 | failures += 1 |
|
318 | 319 | break |
|
319 | 320 | |
|
320 | 321 | current = deltabase |
|
321 | 322 | seen.add(current) |
|
322 | 323 | deltabase = bases[current] |
|
323 | 324 | # Since ``node`` begins a valid chain, reset/memoize its base to nullid |
|
324 | 325 | # so we don't traverse it again. |
|
325 | 326 | bases[node] = nullid |
|
326 | 327 | return failures |
|
327 | 328 | |
|
328 | 329 | def dumpdeltachain(ui, deltachain, **opts): |
|
329 | 330 | hashformatter = hex |
|
330 | 331 | hashlen = 40 |
|
331 | 332 | |
|
332 | 333 | lastfilename = None |
|
333 | 334 | for filename, node, filename, deltabasenode, delta in deltachain: |
|
334 | 335 | if filename != lastfilename: |
|
335 | 336 | ui.write("\n%s\n" % filename) |
|
336 | 337 | lastfilename = filename |
|
337 | 338 | ui.write("%s %s %s %s\n" % ( |
|
338 | 339 | "Node".ljust(hashlen), |
|
339 | 340 | "Delta Base".ljust(hashlen), |
|
340 | 341 | "Delta SHA1".ljust(hashlen), |
|
341 | 342 | "Delta Length".ljust(6), |
|
342 | 343 | )) |
|
343 | 344 | |
|
344 | 345 | ui.write("%s %s %s %d\n" % ( |
|
345 | 346 | hashformatter(node), |
|
346 | 347 | hashformatter(deltabasenode), |
|
347 | 348 | nodemod.hex(hashlib.sha1(delta).digest()), |
|
348 | 349 | len(delta))) |
|
349 | 350 | |
|
350 | 351 | def debughistorypack(ui, path): |
|
351 | 352 | if '.hist' in path: |
|
352 | 353 | path = path[:path.index('.hist')] |
|
353 | 354 | hpack = historypack.historypack(path) |
|
354 | 355 | |
|
355 | 356 | lastfilename = None |
|
356 | 357 | for entry in hpack.iterentries(): |
|
357 | 358 | filename, node, p1node, p2node, linknode, copyfrom = entry |
|
358 | 359 | if filename != lastfilename: |
|
359 | 360 | ui.write("\n%s\n" % filename) |
|
360 | 361 | ui.write("%s%s%s%s%s\n" % ( |
|
361 | 362 | "Node".ljust(14), |
|
362 | 363 | "P1 Node".ljust(14), |
|
363 | 364 | "P2 Node".ljust(14), |
|
364 | 365 | "Link Node".ljust(14), |
|
365 | 366 | "Copy From")) |
|
366 | 367 | lastfilename = filename |
|
367 | 368 | ui.write("%s %s %s %s %s\n" % (short(node), short(p1node), |
|
368 | 369 | short(p2node), short(linknode), copyfrom)) |
|
369 | 370 | |
|
370 | 371 | def debugwaitonrepack(repo): |
|
371 | 372 | with extutil.flock(repack.repacklockvfs(repo).join('repacklock'), ''): |
|
372 | 373 | return |
|
373 | 374 | |
|
374 | 375 | def debugwaitonprefetch(repo): |
|
375 | 376 | with repo._lock(repo.svfs, "prefetchlock", True, None, |
|
376 | 377 | None, _('prefetching in %s') % repo.origroot): |
|
377 | 378 | pass |
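
For reference, here is a minimal standalone sketch of the blob layout that parsefileblob() in the diff walks. It is not part of the change, and the helper name iterancestry is hypothetical, but the offsets mirror the parsing loop above: after the size/flags header and the file text, each ancestry record is four 20-byte binary hashes (node, p1, p2, linknode) followed by a NUL-terminated copyfrom path. (The sketch uses bytes literals; the diffed code predates that py3 convention.)

    def iterancestry(raw, start):
        # Yield (node, p1, p2, linknode, copyfrom) tuples from the
        # ancestry section of a remotefilelog blob, mirroring
        # parsefileblob(): each record is four fixed-width 20-byte
        # slices plus a variable-length, NUL-terminated copyfrom path.
        while start < len(raw):
            divider = raw.index(b'\0', start + 80)
            node = raw[start:start + 20]
            p1 = raw[start + 20:start + 40]
            p2 = raw[start + 40:start + 60]
            linknode = raw[start + 60:start + 80]
            copyfrom = raw[start + 80:divider]
            yield node, p1, p2, linknode, copyfrom
            start = divider + 1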
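
A second sketch, this time of the delta-chain validation performed by _sanitycheck() in the diff. The checkchains helper and the NULLID constant are hypothetical names for illustration; unlike the original, this version only memoizes chains that verified cleanly (the original resets bases[node] unconditionally).

    NULLID = b'\0' * 20

    def checkchains(bases):
        # bases maps node -> deltabase.  Every chain must terminate at
        # NULLID without referencing a node missing from the pack and
        # without revisiting a node (a cycle).
        failures = 0
        for node in list(bases):
            seen = set()
            current = node
            while bases[current] != NULLID:
                base = bases[current]
                if base not in bases:
                    failures += 1  # dangling deltabase
                    break
                if base in seen:
                    failures += 1  # cycle in the chain
                    break
                seen.add(base)
                current = base
            else:
                # Chain reached NULLID: memoize so later walks that
                # pass through ``node`` stop immediately.
                bases[node] = NULLID
        return failures

For example, checkchains({b'\x01' * 20: NULLID}) returns 0, while a two-node loop such as {a: b, b: a} is reported as one failure.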