##// END OF EJS Templates
lfconvert: use a `changing_parents` context to clear the dirstate...
marmoute -
r51005:a8602987 default
parent child Browse files
Show More
@@ -1,674 +1,675 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 10
11 11 import binascii
12 12 import os
13 13 import shutil
14 14
15 15 from mercurial.i18n import _
16 16 from mercurial.node import (
17 17 bin,
18 18 hex,
19 19 )
20 20
21 21 from mercurial import (
22 22 cmdutil,
23 23 context,
24 24 error,
25 25 exthelper,
26 26 hg,
27 27 lock,
28 28 logcmdutil,
29 29 match as matchmod,
30 30 pycompat,
31 31 scmutil,
32 32 util,
33 33 )
34 34 from mercurial.utils import hashutil
35 35
36 36 from ..convert import (
37 37 convcmd,
38 38 filemap,
39 39 )
40 40
41 41 from . import lfutil, storefactory
42 42
43 43 release = lock.release
44 44
45 45 # -- Commands ----------------------------------------------------------
46 46
47 47 eh = exthelper.exthelper()
48 48
49 49
@eh.command(
    b'lfconvert',
    [
        (
            b's',
            b'size',
            b'',
            _(b'minimum size (MB) for files to be converted as largefiles'),
            b'SIZE',
        ),
        (
            b'',
            b'to-normal',
            False,
            _(b'convert from a largefiles repo to a normal repo'),
        ),
    ],
    _(b'hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True,
)
def lfconvert(ui, src, dest, *pats, **opts):
    """convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all."""

    opts = pycompat.byteskwargs(opts)
    if opts[b'to_normal']:
        tolfile = False
    else:
        tolfile = True
        # minimum size (in MB) above which a file is converted to a largefile
        size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)

    # both ends must be local: we walk the source history and write the
    # destination repository directly
    if not hg.islocal(src):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_(b'initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (
            rsrc[ctx]
            for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
        )
        # maps source node -> destination node; seeded with the null revision
        revmap = {rsrc.nullid: rdst.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            # files already classified as large/normal, shared across revisions
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, b'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, b'', list(pats))
            else:
                matcher = None

            # lfile name -> content hash of its last written version
            lfiletohash = {}
            with ui.makeprogress(
                _(b'converting revisions'),
                unit=_(b'revisions'),
                total=rsrc[b'tip'].rev(),
            ) as progress:
                for ctx in ctxs:
                    progress.update(ctx.rev())
                    _lfconvert_addchangeset(
                        rsrc,
                        rdst,
                        ctx,
                        revmap,
                        lfiles,
                        normalfiles,
                        matcher,
                        size,
                        lfiletohash,
                    )

            # remove working-copy leftovers (standin dir and largefile
            # payloads written via wwrite) from the destination
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add(b'largefiles')
                scmutil.writereporequirements(rdst)
        else:

            # convert source that replaces each standin's content (a hash)
            # with the actual largefile data before handing it to convcmd
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = b'.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (
                        not realname.startswith(lfutil.shortnameslash)
                        or f[0] is None
                    ):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(
                            _(b"missing largefile for '%s' in %s")
                            % (realname, realrev)
                        )
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(
                        ui, src, dest, revmapfile, opts
                    )

            # every largefile must be cached locally before conversion
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_(b"all largefiles must be present locally"))

            # temporarily monkeypatch convcmd so hg->hg conversion uses our
            # largefile-aware source; restored in the finally below
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(
                    ui, src, dest, source_type=b'hg', dest_type=b'hg'
                )
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            # XXX is this the right context semantically ?
            with rdst.dirstate.changing_parents(rdst):
                rdst.dirstate.clear()
        release(dstlock, dstwlock)
    if not success:
        # we failed, remove the new directory
        shutil.rmtree(rdst.root)
228 229
229 230
def _lfconvert_addchangeset(
    rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
):
    """Convert one source changeset ``ctx`` into the destination repo.

    Classifies each changed file as largefile or normal (updating the
    shared ``lfiles``/``normalfiles`` sets), writes/updates standins for
    modified largefiles, then commits a memctx and records the source ->
    destination node mapping in ``revmap``.  ``lfiletohash`` caches the
    content hash last written for each largefile.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        # classify f only the first time we see it in the whole conversion
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.copysource()
                if renamed is None:
                    # the code below assumes renamed to be a boolean or a list
                    # and won't quite work with the value None
                    renamed = False
                renamedlfile = renamed and renamed in lfiles
                islfile |= renamedlfile
                if b'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise error.Abort(
                            _(b'renamed/copied largefile %s becomes symlink')
                            % f
                        )
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            fstandin = lfutil.standin(f)
            dstfiles.append(fstandin)
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if b'l' in fctx.flags():
                    renamed = fctx.copysource()
                    if renamed and renamed in lfiles:
                        raise error.Abort(
                            _(b'largefile %s becomes symlink') % f
                        )

                # largefile was modified, update standins
                m = hashutil.sha1(b'')
                m.update(ctx[f].data())
                hash = hex(m.digest())
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # write payload to the working copy and a standin
                    # containing only the hash
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = b'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, fstandin, hash, executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # callback used by memctx to materialize each destination file
        srcfname = lfutil.splitstandin(f)
        if srcfname is not None:
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.copysource()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed)

            return context.memfilectx(
                repo,
                memctx,
                f,
                lfiletohash[srcfname] + b'\n',
                b'l' in fctx.flags(),
                b'x' in fctx.flags(),
                renamed,
            )
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
321 322
322 323
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit a converted changeset into ``rdst`` and record the mapping
    from the source node to the new destination tip in ``revmap``."""
    memory_ctx = context.memctx(
        rdst,
        parents,
        ctx.description(),
        dstfiles,
        getfilectx,
        ctx.user(),
        ctx.date(),
        ctx.extra(),
    )
    newnode = rdst.commitctx(memory_ctx)
    # make sure any largefiles committed here also land in the local store
    lfutil.copyalltostore(rdst, newnode)
    rdst.setparents(newnode)
    # remember where this source changeset ended up in the destination
    revmap[ctx.node()] = rdst.changelog.tip()
338 339
339 340
340 341 # Generate list of changed files
341 342 def _getchangedfiles(ctx, parents):
342 343 files = set(ctx.files())
343 344 if ctx.repo().nullid not in parents:
344 345 mc = ctx.manifest()
345 346 for pctx in ctx.parents():
346 347 for fn in pctx.manifest().diff(mc):
347 348 files.add(fn)
348 349 return files
349 350
350 351
351 352 # Convert src parents to dst parents
352 353 def _convertparents(ctx, revmap):
353 354 parents = []
354 355 for p in ctx.parents():
355 356 parents.append(revmap[p.node()])
356 357 while len(parents) < 2:
357 358 parents.append(ctx.repo().nullid)
358 359 return parents
359 360
360 361
def _getnormalcontext(repo, ctx, f, revmap):
    """Build a memfilectx for a normal (non-large) file.

    Returns None when the file is absent from the changeset (it was
    removed or renamed)."""
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        # not present in this revision
        return None
    renamed = fctx.copysource()

    data = fctx.data()
    # .hgtags embeds changeset ids which must be remapped for the new repo
    if f == b'.hgtags':
        data = _converttags(repo.ui, revmap, data)
    flags = fctx.flags()
    return context.memfilectx(
        repo, ctx, f, data, b'l' in flags, b'x' in flags, renamed
    )
375 376
376 377
def _converttags(ui, revmap, data):
    """Rewrite a .hgtags blob so each node id points into the converted
    repository; malformed or unmapped lines are skipped with a warning."""

    def _mapline(line):
        # return the rewritten tag line, or None if it must be skipped
        try:
            id, name = line.split(b' ', 1)
        except ValueError:
            ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line)
            return None
        try:
            newid = bin(id)
        except binascii.Error:
            ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
            return None
        try:
            return b'%s %s\n' % (hex(revmap[newid]), name)
        except KeyError:
            ui.warn(_(b'no mapping for id %s\n') % id)
            return None

    return b''.join(
        newline
        for newline in (_mapline(line) for line in data.splitlines())
        if newline is not None
    )
397 398
398 399
399 400 def _islfile(file, ctx, matcher, size):
400 401 """Return true if file should be considered a largefile, i.e.
401 402 matcher matches it or it is larger than size."""
402 403 # never store special .hg* files as largefiles
403 404 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
404 405 return False
405 406 if matcher and matcher(file):
406 407 return True
407 408 try:
408 409 return ctx.filectx(file).size() >= size * 1024 * 1024
409 410 except error.LookupError:
410 411 return False
411 412
412 413
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = storefactory.openstore(rsrc, rdst, put=True)

    # ask the store which hashes it already has, then upload only the rest
    ui.debug(b"sending statlfile command for %d largefiles\n" % len(files))
    present = store.exists(files)
    pending = [h for h in files if not present[h]]
    ui.debug(b"%d largefiles need to be uploaded\n" % len(pending))

    with ui.makeprogress(
        _(b'uploading largefiles'), unit=_(b'files'), total=len(pending)
    ) as progress:
        for at, hash in enumerate(pending):
            progress.update(at)
            source = lfutil.findfile(rsrc, hash)
            if not source:
                raise error.Abort(
                    _(
                        b'largefile %s missing from store'
                        b' (needs to be uploaded)'
                    )
                    % hash
                )
            # XXX check for errors here
            store.put(source, hash)
444 445
445 446
def verifylfiles(ui, repo, all=False, contents=False):
    """Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository."""
    # verify either the whole history or just the working directory parent
    revs = repo.revs(b'all()') if all else [b'.']
    store = storefactory.openstore(repo)
    return store.verify(revs, contents=contents)
459 460
460 461
def cachelfiles(ui, repo, node, filelist=None):
    """cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found."""
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    ctx = repo[node]
    needed = []
    for lfile in lfiles:
        try:
            expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
        except FileNotFoundError:
            # node must be None and the standin wasn't found in the wctx
            continue
        # only fetch what the local store does not already hold
        if not lfutil.findfile(repo, expectedhash):
            needed.append((lfile, expectedhash))

    if not needed:
        return ([], [])
    return storefactory.openstore(repo).get(needed)
488 489
489 490
def downloadlfiles(ui, repo):
    """Cache every largefile referenced anywhere in history.

    Returns (number cached, number that could not be fetched)."""
    tonode = repo.changelog.node
    cached_total = 0
    missing_total = 0
    # visit every revision touching a file under the standin directory
    for rev in repo.revs(b'file(%s)', b'path:' + lfutil.shortname):
        cached, missing = cachelfiles(ui, repo, tonode(rev))
        cached_total += len(cached)
        missing_total += len(missing)
    ui.status(_(b"%d additional largefiles cached\n") % cached_total)
    if missing_total > 0:
        ui.status(_(b"%d largefiles failed to download\n") % missing_total)
    return cached_total, missing_total
502 503
503 504
def updatelfiles(
    ui, repo, filelist=None, printmessage=None, normallookup=False
):
    """Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".

    ``filelist`` restricts the operation to the given largefile names;
    ``normallookup`` is passed through to lfutil.synclfdirstate when
    synchronizing each file's dirstate entry.
    """
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # consider both files tracked in this revision and files already
        # known to the largefiles dirstate
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        # lfile name -> expected content hash, filled while scanning standins
        update = {}
        dropped = set()
        updated, removed = 0, 0
        wvfs = repo.wvfs
        wctx = repo[None]
        for lfile in lfiles:
            # backup paths (relative to the repo root) for the largefile and
            # its standin, as produced by scmutil.backuppath
            lfileorig = os.path.relpath(
                scmutil.backuppath(ui, repo, lfile), start=repo.root
            )
            standin = lfutil.standin(lfile)
            standinorig = os.path.relpath(
                scmutil.backuppath(ui, repo, standin), start=repo.root
            )
            if wvfs.exists(standin):
                if wvfs.exists(standinorig) and wvfs.exists(lfile):
                    # preserve the current largefile alongside the standin
                    # backup before it gets overwritten
                    shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
                    wvfs.unlinkpath(standinorig)
                expecthash = lfutil.readasstandin(wctx[standin])
                if expecthash != b'':
                    if lfile not in wctx:  # not switched to normal file
                        if repo.dirstate.get_entry(standin).any_tracked:
                            wvfs.unlinkpath(lfile, ignoremissing=True)
                        else:
                            dropped.add(lfile)

                    # allocate an entry in largefiles dirstate to prevent
                    # lfilesrepo.status() from reporting missing files as
                    # removed.
                    lfdirstate.hacky_extension_update_file(
                        lfile,
                        p1_tracked=True,
                        wc_tracked=True,
                        possibly_dirty=True,
                    )
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (
                    wvfs.exists(lfile)
                    and repo.dirstate.normalize(lfile) not in wctx
                ):
                    wvfs.unlinkpath(lfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write(repo.currenttransaction())

        if lfiles:
            lfiles = [f for f in lfiles if f not in dropped]

            for f in dropped:
                repo.wvfs.unlinkpath(lfutil.standin(f))
                # This needs to happen for dropped files, otherwise they stay in
                # the M state.
                lfdirstate._map.reset_state(f)

            statuswriter(_(b'getting changed largefiles\n'))
            # ensure the needed largefile contents are in the local cache
            cachelfiles(ui, repo, None, lfiles)

        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.hacky_extension_update_file(
                    lfile,
                    p1_tracked=True,
                    wc_tracked=True,
                )
                update1 = 1

            # copy the exec mode of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            standin = lfutil.standin(lfile)
            if wvfs.exists(standin):
                # exec is decided by the users permissions using mask 0o100
                standinexec = wvfs.stat(standin).st_mode & 0o100
                st = wvfs.stat(lfile)
                mode = st.st_mode
                if standinexec != mode & 0o100:
                    # first remove all X bits, then shift all R bits to X
                    mode &= ~0o111
                    if standinexec:
                        mode |= (mode >> 2) & 0o111 & ~util.umask
                    wvfs.chmod(lfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write(repo.currenttransaction())
        if lfiles:
            statuswriter(
                _(b'%d largefiles updated, %d removed\n') % (updated, removed)
            )
625 626
626 627
@eh.command(
    b'lfpull',
    [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
    + cmdutil.remoteopts,
    _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
)
def lfpull(ui, repo, source=b"default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull from for the store machinery
    repo.lfpullsource = source

    revspec = opts.get('rev', [])
    if not revspec:
        raise error.Abort(_(b'no revisions specified'))

    numcached = 0
    for rev in logcmdutil.revrange(repo, revspec):
        ui.note(_(b'pulling largefiles for revision %d\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_(b"%d largefiles cached\n") % numcached)
667 668
668 669
@eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
def debuglfput(ui, repo, filepath, **kwargs):
    # hash the file, push it into the configured store, and echo the hash
    sha = lfutil.hashfile(filepath)
    storefactory.openstore(repo).put(filepath, sha)
    ui.write(b'%s\n' % sha)
    return 0
General Comments 0
You need to be logged in to leave comments. Login now