remotefilelog: fix format str, blobsize isn't always a #, can be "(missing)"...
Kyle Lippincott
r41973:94168550 default
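Why the format string had to change: `blobsize` is an int read from the pack metadata when the size key is present, but the code falls back to the placeholder string "(missing)" when it is not, and "%d" raises TypeError for strings. The fix switches the specifier to "%s" and coerces the value through pycompat.bytestr so both cases format cleanly (and bytes formatting keeps working on Python 3). A minimal standalone sketch of the failure mode, using illustrative values rather than the extension's own code:

    # blobsize is an int when pack metadata is present, otherwise the
    # placeholder string "(missing)" -- the two cases exercised below.
    for blobsize in (1234, "(missing)"):
        try:
            print("%s%d" % ("deadbeefcafe".ljust(14), blobsize))  # old: %d
        except TypeError as exc:
            print("%%d rejects %r: %s" % (blobsize, exc))
        print("%s%s" % ("deadbeefcafe".ljust(14), blobsize))      # fixed: %s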
@@ -1,377 +1,378 @@
 # debugcommands.py - debug logic for remotefilelog
 #
 # Copyright 2013 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import

 import hashlib
 import os
 import zlib

 from mercurial.node import bin, hex, nullid, short
 from mercurial.i18n import _
 from mercurial import (
     error,
     filelog,
     node as nodemod,
+    pycompat,
     revlog,
 )
 from . import (
     constants,
     datapack,
     extutil,
     fileserverclient,
     historypack,
     repack,
     shallowutil,
 )

 def debugremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')

     size, firstnode, mapping = parsefileblob(path, decompress)

     ui.status(_("size: %d bytes\n") % (size))
     ui.status(_("path: %s \n") % (path))
     ui.status(_("key: %s \n") % (short(firstnode)))
     ui.status(_("\n"))
     ui.status(_("%12s => %12s %13s %13s %12s\n") %
               ("node", "p1", "p2", "linknode", "copyfrom"))

     queue = [firstnode]
     while queue:
         node = queue.pop(0)
         p1, p2, linknode, copyfrom = mapping[node]
         ui.status(_("%s => %s %s %s %s\n") %
             (short(node), short(p1), short(p2), short(linknode), copyfrom))
         if p1 != nullid:
             queue.append(p1)
         if p2 != nullid:
             queue.append(p2)

 def buildtemprevlog(repo, file):
     # get filename key
     filekey = nodemod.hex(hashlib.sha1(file).digest())
     filedir = os.path.join(repo.path, 'store/data', filekey)

     # sort all entries based on linkrev
     fctxs = []
     for filenode in os.listdir(filedir):
         if '_old' not in filenode:
             fctxs.append(repo.filectx(file, fileid=bin(filenode)))

     fctxs = sorted(fctxs, key=lambda x: x.linkrev())

     # add to revlog
     temppath = repo.sjoin('data/temprevlog.i')
     if os.path.exists(temppath):
         os.remove(temppath)
     r = filelog.filelog(repo.svfs, 'temprevlog')

     class faket(object):
         def add(self, a, b, c):
             pass
     t = faket()
     for fctx in fctxs:
         if fctx.node() not in repo:
             continue

         p = fctx.filelog().parents(fctx.filenode())
         meta = {}
         if fctx.renamed():
             meta['copy'] = fctx.renamed()[0]
             meta['copyrev'] = hex(fctx.renamed()[1])

         r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1])

     return r

 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
     if (opts.get(r'changelog') or
         opts.get(r'manifest') or
         opts.get(r'dir') or
         not shallowutil.isenabled(repo) or
         not repo.shallowmatch(file_)):
         return orig(ui, repo, file_, **opts)

     r = buildtemprevlog(repo, file_)

     # debugindex like normal
     format = opts.get('format', 0)
     if format not in (0, 1):
         raise error.Abort(_("unknown format %d") % format)

     generaldelta = r.version & revlog.FLAG_GENERALDELTA
     if generaldelta:
         basehdr = ' delta'
     else:
         basehdr = '  base'

     if format == 0:
         ui.write(("   rev    offset  length " + basehdr + " linkrev"
                   " nodeid       p1           p2\n"))
     elif format == 1:
         ui.write(("   rev flag   offset   length"
                   "     size " + basehdr + "   link     p1     p2"
                   "       nodeid\n"))

     for i in r:
         node = r.node(i)
         if generaldelta:
             base = r.deltaparent(i)
         else:
             base = r.chainbase(i)
         if format == 0:
             try:
                 pp = r.parents(node)
             except Exception:
                 pp = [nullid, nullid]
             ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                 i, r.start(i), r.length(i), base, r.linkrev(i),
                 short(node), short(pp[0]), short(pp[1])))
         elif format == 1:
             pr = r.parentrevs(i)
             ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                 base, r.linkrev(i), pr[0], pr[1], short(node)))

 def debugindexdot(orig, ui, repo, file_):
     """dump an index DAG as a graphviz dot file"""
     if not shallowutil.isenabled(repo):
         return orig(ui, repo, file_)

     r = buildtemprevlog(repo, os.path.basename(file_)[:-2])

     ui.write(("digraph G {\n"))
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
         ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
         if pp[1] != nullid:
             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write("}\n")

 def verifyremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')

     for root, dirs, files in os.walk(path):
         for file in files:
             if file == "repos":
                 continue
             filepath = os.path.join(root, file)
             size, firstnode, mapping = parsefileblob(filepath, decompress)
             for p1, p2, linknode, copyfrom in mapping.itervalues():
                 if linknode == nullid:
                     actualpath = os.path.relpath(root, path)
                     key = fileserverclient.getcachekey("reponame", actualpath,
                                                        file)
                     ui.status("%s %s\n" % (key, os.path.relpath(filepath,
                                                                 path)))

 def _decompressblob(raw):
     return zlib.decompress(raw)

 def parsefileblob(path, decompress):
     f = open(path, "rb")
     try:
         raw = f.read()
     finally:
         f.close()

     if decompress:
         raw = _decompressblob(raw)

     offset, size, flags = shallowutil.parsesizeflags(raw)
     start = offset + size

     firstnode = None

     mapping = {}
     while start < len(raw):
         divider = raw.index('\0', start + 80)

         currentnode = raw[start:(start + 20)]
         if not firstnode:
             firstnode = currentnode

         p1 = raw[(start + 20):(start + 40)]
         p2 = raw[(start + 40):(start + 60)]
         linknode = raw[(start + 60):(start + 80)]
         copyfrom = raw[(start + 80):divider]

         mapping[currentnode] = (p1, p2, linknode, copyfrom)
         start = divider + 1

     return size, firstnode, mapping

 def debugdatapack(ui, *paths, **opts):
     for path in paths:
         if '.data' in path:
             path = path[:path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
         node = opts.get(r'node')
         if node:
             deltachain = dpack.getdeltachain('', bin(node))
             dumpdeltachain(ui, deltachain, **opts)
             return

         if opts.get(r'long'):
             hashformatter = hex
             hashlen = 42
         else:
             hashformatter = short
             hashlen = 14

         lastfilename = None
         totaldeltasize = 0
         totalblobsize = 0
         def printtotals():
             if lastfilename is not None:
                 ui.write("\n")
             if not totaldeltasize or not totalblobsize:
                 return
             difference = totalblobsize - totaldeltasize
             deltastr = "%0.1f%% %s" % (
                 (100.0 * abs(difference) / totalblobsize),
                 ("smaller" if difference > 0 else "bigger"))

             ui.write(("Total:%s%s  %s (%s)\n") % (
                 "".ljust(2 * hashlen - len("Total:")),
                 ('%d' % totaldeltasize).ljust(12),
                 ('%d' % totalblobsize).ljust(9),
                 deltastr
             ))

         bases = {}
         nodes = set()
         failures = 0
         for filename, node, deltabase, deltalen in dpack.iterentries():
             bases[node] = deltabase
             if node in nodes:
                 ui.write(("Bad entry: %s appears twice\n" % short(node)))
                 failures += 1
             nodes.add(node)
             if filename != lastfilename:
                 printtotals()
                 name = '(empty name)' if filename == '' else filename
                 ui.write("%s:\n" % name)
                 ui.write("%s%s%s%s\n" % (
                     "Node".ljust(hashlen),
                     "Delta Base".ljust(hashlen),
                     "Delta Length".ljust(14),
                     "Blob Size".ljust(9)))
                 lastfilename = filename
                 totalblobsize = 0
                 totaldeltasize = 0

             # Metadata could be missing, in which case it will be an empty dict.
             meta = dpack.getmeta(filename, node)
             if constants.METAKEYSIZE in meta:
                 blobsize = meta[constants.METAKEYSIZE]
                 totaldeltasize += deltalen
                 totalblobsize += blobsize
             else:
                 blobsize = "(missing)"
-            ui.write("%s  %s  %s%d\n" % (
+            ui.write("%s  %s  %s%s\n" % (
                 hashformatter(node),
                 hashformatter(deltabase),
                 ('%d' % deltalen).ljust(14),
-                blobsize))
+                pycompat.bytestr(blobsize)))

         if filename is not None:
             printtotals()

         failures += _sanitycheck(ui, set(nodes), bases)
         if failures > 1:
             ui.warn(("%d failures\n" % failures))
             return 1

 def _sanitycheck(ui, nodes, bases):
     """
     Does some basic sanity checking on a packfiles with ``nodes`` ``bases`` (a
     mapping of node->base):

     - Each deltabase must itself be a node elsewhere in the pack
     - There must be no cycles
     """
     failures = 0
     for node in nodes:
         seen = set()
         current = node
         deltabase = bases[current]

         while deltabase != nullid:
             if deltabase not in nodes:
                 ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" %
                          (short(node), short(deltabase))))
                 failures += 1
                 break

             if deltabase in seen:
                 ui.warn(("Bad entry: %s has a cycle (at %s)\n" %
                          (short(node), short(deltabase))))
                 failures += 1
                 break

             current = deltabase
             seen.add(current)
             deltabase = bases[current]
         # Since ``node`` begins a valid chain, reset/memoize its base to nullid
         # so we don't traverse it again.
         bases[node] = nullid
     return failures

 def dumpdeltachain(ui, deltachain, **opts):
     hashformatter = hex
     hashlen = 40

     lastfilename = None
     for filename, node, filename, deltabasenode, delta in deltachain:
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
             lastfilename = filename
             ui.write("%s %s %s %s\n" % (
                 "Node".ljust(hashlen),
                 "Delta Base".ljust(hashlen),
                 "Delta SHA1".ljust(hashlen),
                 "Delta Length".ljust(6),
             ))

         ui.write("%s %s %s %d\n" % (
             hashformatter(node),
             hashformatter(deltabasenode),
             nodemod.hex(hashlib.sha1(delta).digest()),
             len(delta)))

 def debughistorypack(ui, path):
     if '.hist' in path:
         path = path[:path.index('.hist')]
     hpack = historypack.historypack(path)

     lastfilename = None
     for entry in hpack.iterentries():
         filename, node, p1node, p2node, linknode, copyfrom = entry
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
             ui.write("%s%s%s%s%s\n" % (
                 "Node".ljust(14),
                 "P1 Node".ljust(14),
                 "P2 Node".ljust(14),
                 "Link Node".ljust(14),
                 "Copy From"))
             lastfilename = filename
         ui.write("%s  %s  %s  %s  %s\n" % (short(node), short(p1node),
                   short(p2node), short(linknode), copyfrom))

 def debugwaitonrepack(repo):
     with extutil.flock(repack.repacklockvfs(repo).join('repacklock'), ''):
         return

 def debugwaitonprefetch(repo):
     with repo._lock(repo.svfs, "prefetchlock", True, None,
                     None, _('prefetching in %s') % repo.origroot):
         pass
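For readers tracing `parsefileblob` above: past the size/flags header, each ancestry record is a fixed 80 bytes (four 20-byte binary hashes: node, p1, p2, linknode) followed by a NUL-terminated copyfrom path. A round-trip sketch with made-up bytes, mirroring the slicing in `parsefileblob` (illustrative only, not real repository data):

    # Build one hypothetical record: 80 bytes of hashes, then copyfrom + NUL.
    node, p1, linknode = b"\x11" * 20, b"\x22" * 20, b"\x33" * 20
    p2 = b"\x00" * 20                      # nullid: no second parent
    copyfrom = b"old/path"
    raw = node + p1 + p2 + linknode + copyfrom + b"\x00"

    # Parse it back with the same offsets parsefileblob uses.
    start = 0
    divider = raw.index(b"\x00", start + 80)   # skip the fixed 80-byte prefix
    assert raw[start:start + 20] == node
    assert raw[start + 20:start + 40] == p1
    assert raw[start + 40:start + 60] == p2
    assert raw[start + 60:start + 80] == linknode
    assert raw[start + 80:divider] == copyfrom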