largefile: use `update_file` instead of `normallookup` in `updatelfiles`...
marmoute
r48522:47dce5a9 default
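The whole of this changeset is the single hunk inside updatelfiles() below: the dirstate entry for a largefile whose standin still exists is now allocated through the newer update_file() API instead of the legacy normallookup() call, while keeping the old semantics of "track the file but do not trust its on-disk state yet". A minimal sketch of the two spellings, wrapped in a hypothetical helper for illustration (mark_possibly_dirty is not part of the codebase; lfdirstate is assumed to be the object returned by lfutil.openlfdirstate(ui, repo), as in the patched function):

def mark_possibly_dirty(lfdirstate, lfile, use_legacy_api=False):
    """Allocate a dirstate entry for `lfile` without trusting its
    on-disk state, so lfilesrepo.status() does not report it as removed."""
    if use_legacy_api:
        # pre-48522 spelling, removed by this changeset
        lfdirstate.normallookup(lfile)
    else:
        # spelling introduced by this changeset: tracked in p1 and in the
        # working copy, but flagged possibly dirty so a later status()
        # re-checks the file instead of assuming it is clean
        lfdirstate.update_file(
            lfile,
            p1_tracked=True,
            wc_tracked=True,
            possibly_dirty=True,
        )

Note that the second parentchange() block in the same function already calls update_file(lfile, p1_tracked=True, wc_tracked=True) for largefiles actually copied from the cache, so this change brings the first block in line with that API.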
@@ -1,670 +1,675 @@
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 10 from __future__ import absolute_import
11 11
12 12 import errno
13 13 import os
14 14 import shutil
15 15
16 16 from mercurial.i18n import _
17 17 from mercurial.node import (
18 18 bin,
19 19 hex,
20 20 )
21 21
22 22 from mercurial import (
23 23 cmdutil,
24 24 context,
25 25 error,
26 26 exthelper,
27 27 hg,
28 28 lock,
29 29 match as matchmod,
30 30 pycompat,
31 31 scmutil,
32 32 util,
33 33 )
34 34 from mercurial.utils import hashutil
35 35
36 36 from ..convert import (
37 37 convcmd,
38 38 filemap,
39 39 )
40 40
41 41 from . import lfutil, storefactory
42 42
43 43 release = lock.release
44 44
45 45 # -- Commands ----------------------------------------------------------
46 46
47 47 eh = exthelper.exthelper()
48 48
49 49
50 50 @eh.command(
51 51 b'lfconvert',
52 52 [
53 53 (
54 54 b's',
55 55 b'size',
56 56 b'',
57 57 _(b'minimum size (MB) for files to be converted as largefiles'),
58 58 b'SIZE',
59 59 ),
60 60 (
61 61 b'',
62 62 b'to-normal',
63 63 False,
64 64 _(b'convert from a largefiles repo to a normal repo'),
65 65 ),
66 66 ],
67 67 _(b'hg lfconvert SOURCE DEST [FILE ...]'),
68 68 norepo=True,
69 69 inferrepo=True,
70 70 )
71 71 def lfconvert(ui, src, dest, *pats, **opts):
72 72 """convert a normal repository to a largefiles repository
73 73
74 74 Convert repository SOURCE to a new repository DEST, identical to
75 75 SOURCE except that certain files will be converted as largefiles:
76 76 specifically, any file that matches any PATTERN *or* whose size is
77 77 above the minimum size threshold is converted as a largefile. The
78 78 size used to determine whether or not to track a file as a
79 79 largefile is the size of the first version of the file. The
80 80 minimum size can be specified either with --size or in
81 81 configuration as ``largefiles.size``.
82 82
83 83 After running this command you will need to make sure that
84 84 largefiles is enabled anywhere you intend to push the new
85 85 repository.
86 86
87 87 Use --to-normal to convert largefiles back to normal files; after
88 88 this, the DEST repository can be used without largefiles at all."""
89 89
90 90 opts = pycompat.byteskwargs(opts)
91 91 if opts[b'to_normal']:
92 92 tolfile = False
93 93 else:
94 94 tolfile = True
95 95 size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)
96 96
97 97 if not hg.islocal(src):
98 98 raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
99 99 if not hg.islocal(dest):
100 100 raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)
101 101
102 102 rsrc = hg.repository(ui, src)
103 103 ui.status(_(b'initializing destination %s\n') % dest)
104 104 rdst = hg.repository(ui, dest, create=True)
105 105
106 106 success = False
107 107 dstwlock = dstlock = None
108 108 try:
109 109 # Get a list of all changesets in the source. The easy way to do this
110 110 # is to simply walk the changelog, using changelog.nodesbetween().
111 111 # Take a look at mercurial/revlog.py:639 for more details.
112 112 # Use a generator instead of a list to decrease memory usage
113 113 ctxs = (
114 114 rsrc[ctx]
115 115 for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
116 116 )
117 117 revmap = {rsrc.nullid: rdst.nullid}
118 118 if tolfile:
119 119 # Lock destination to prevent modification while it is converted to.
120 120 # Don't need to lock src because we are just reading from its
121 121 # history which can't change.
122 122 dstwlock = rdst.wlock()
123 123 dstlock = rdst.lock()
124 124
125 125 lfiles = set()
126 126 normalfiles = set()
127 127 if not pats:
128 128 pats = ui.configlist(lfutil.longname, b'patterns')
129 129 if pats:
130 130 matcher = matchmod.match(rsrc.root, b'', list(pats))
131 131 else:
132 132 matcher = None
133 133
134 134 lfiletohash = {}
135 135 with ui.makeprogress(
136 136 _(b'converting revisions'),
137 137 unit=_(b'revisions'),
138 138 total=rsrc[b'tip'].rev(),
139 139 ) as progress:
140 140 for ctx in ctxs:
141 141 progress.update(ctx.rev())
142 142 _lfconvert_addchangeset(
143 143 rsrc,
144 144 rdst,
145 145 ctx,
146 146 revmap,
147 147 lfiles,
148 148 normalfiles,
149 149 matcher,
150 150 size,
151 151 lfiletohash,
152 152 )
153 153
154 154 if rdst.wvfs.exists(lfutil.shortname):
155 155 rdst.wvfs.rmtree(lfutil.shortname)
156 156
157 157 for f in lfiletohash.keys():
158 158 if rdst.wvfs.isfile(f):
159 159 rdst.wvfs.unlink(f)
160 160 try:
161 161 rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
162 162 except OSError:
163 163 pass
164 164
165 165 # If there were any files converted to largefiles, add largefiles
166 166 # to the destination repository's requirements.
167 167 if lfiles:
168 168 rdst.requirements.add(b'largefiles')
169 169 scmutil.writereporequirements(rdst)
170 170 else:
171 171
172 172 class lfsource(filemap.filemap_source):
173 173 def __init__(self, ui, source):
174 174 super(lfsource, self).__init__(ui, source, None)
175 175 self.filemapper.rename[lfutil.shortname] = b'.'
176 176
177 177 def getfile(self, name, rev):
178 178 realname, realrev = rev
179 179 f = super(lfsource, self).getfile(name, rev)
180 180
181 181 if (
182 182 not realname.startswith(lfutil.shortnameslash)
183 183 or f[0] is None
184 184 ):
185 185 return f
186 186
187 187 # Substitute in the largefile data for the hash
188 188 hash = f[0].strip()
189 189 path = lfutil.findfile(rsrc, hash)
190 190
191 191 if path is None:
192 192 raise error.Abort(
193 193 _(b"missing largefile for '%s' in %s")
194 194 % (realname, realrev)
195 195 )
196 196 return util.readfile(path), f[1]
197 197
198 198 class converter(convcmd.converter):
199 199 def __init__(self, ui, source, dest, revmapfile, opts):
200 200 src = lfsource(ui, source)
201 201
202 202 super(converter, self).__init__(
203 203 ui, src, dest, revmapfile, opts
204 204 )
205 205
206 206 found, missing = downloadlfiles(ui, rsrc)
207 207 if missing != 0:
208 208 raise error.Abort(_(b"all largefiles must be present locally"))
209 209
210 210 orig = convcmd.converter
211 211 convcmd.converter = converter
212 212
213 213 try:
214 214 convcmd.convert(
215 215 ui, src, dest, source_type=b'hg', dest_type=b'hg'
216 216 )
217 217 finally:
218 218 convcmd.converter = orig
219 219 success = True
220 220 finally:
221 221 if tolfile:
222 222 rdst.dirstate.clear()
223 223 release(dstlock, dstwlock)
224 224 if not success:
225 225 # we failed, remove the new directory
226 226 shutil.rmtree(rdst.root)
227 227
228 228
229 229 def _lfconvert_addchangeset(
230 230 rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
231 231 ):
232 232 # Convert src parents to dst parents
233 233 parents = _convertparents(ctx, revmap)
234 234
235 235 # Generate list of changed files
236 236 files = _getchangedfiles(ctx, parents)
237 237
238 238 dstfiles = []
239 239 for f in files:
240 240 if f not in lfiles and f not in normalfiles:
241 241 islfile = _islfile(f, ctx, matcher, size)
242 242 # If this file was renamed or copied then copy
243 243 # the largefile-ness of its predecessor
244 244 if f in ctx.manifest():
245 245 fctx = ctx.filectx(f)
246 246 renamed = fctx.copysource()
247 247 if renamed is None:
248 248 # the code below assumes renamed to be a boolean or a list
249 249 # and won't quite work with the value None
250 250 renamed = False
251 251 renamedlfile = renamed and renamed in lfiles
252 252 islfile |= renamedlfile
253 253 if b'l' in fctx.flags():
254 254 if renamedlfile:
255 255 raise error.Abort(
256 256 _(b'renamed/copied largefile %s becomes symlink')
257 257 % f
258 258 )
259 259 islfile = False
260 260 if islfile:
261 261 lfiles.add(f)
262 262 else:
263 263 normalfiles.add(f)
264 264
265 265 if f in lfiles:
266 266 fstandin = lfutil.standin(f)
267 267 dstfiles.append(fstandin)
268 268 # largefile in manifest if it has not been removed/renamed
269 269 if f in ctx.manifest():
270 270 fctx = ctx.filectx(f)
271 271 if b'l' in fctx.flags():
272 272 renamed = fctx.copysource()
273 273 if renamed and renamed in lfiles:
274 274 raise error.Abort(
275 275 _(b'largefile %s becomes symlink') % f
276 276 )
277 277
278 278 # largefile was modified, update standins
279 279 m = hashutil.sha1(b'')
280 280 m.update(ctx[f].data())
281 281 hash = hex(m.digest())
282 282 if f not in lfiletohash or lfiletohash[f] != hash:
283 283 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
284 284 executable = b'x' in ctx[f].flags()
285 285 lfutil.writestandin(rdst, fstandin, hash, executable)
286 286 lfiletohash[f] = hash
287 287 else:
288 288 # normal file
289 289 dstfiles.append(f)
290 290
291 291 def getfilectx(repo, memctx, f):
292 292 srcfname = lfutil.splitstandin(f)
293 293 if srcfname is not None:
294 294 # if the file isn't in the manifest then it was removed
295 295 # or renamed, return None to indicate this
296 296 try:
297 297 fctx = ctx.filectx(srcfname)
298 298 except error.LookupError:
299 299 return None
300 300 renamed = fctx.copysource()
301 301 if renamed:
302 302 # standin is always a largefile because largefile-ness
303 303 # doesn't change after rename or copy
304 304 renamed = lfutil.standin(renamed)
305 305
306 306 return context.memfilectx(
307 307 repo,
308 308 memctx,
309 309 f,
310 310 lfiletohash[srcfname] + b'\n',
311 311 b'l' in fctx.flags(),
312 312 b'x' in fctx.flags(),
313 313 renamed,
314 314 )
315 315 else:
316 316 return _getnormalcontext(repo, ctx, f, revmap)
317 317
318 318 # Commit
319 319 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
320 320
321 321
322 322 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
323 323 mctx = context.memctx(
324 324 rdst,
325 325 parents,
326 326 ctx.description(),
327 327 dstfiles,
328 328 getfilectx,
329 329 ctx.user(),
330 330 ctx.date(),
331 331 ctx.extra(),
332 332 )
333 333 ret = rdst.commitctx(mctx)
334 334 lfutil.copyalltostore(rdst, ret)
335 335 rdst.setparents(ret)
336 336 revmap[ctx.node()] = rdst.changelog.tip()
337 337
338 338
339 339 # Generate list of changed files
340 340 def _getchangedfiles(ctx, parents):
341 341 files = set(ctx.files())
342 342 if ctx.repo().nullid not in parents:
343 343 mc = ctx.manifest()
344 344 for pctx in ctx.parents():
345 345 for fn in pctx.manifest().diff(mc):
346 346 files.add(fn)
347 347 return files
348 348
349 349
350 350 # Convert src parents to dst parents
351 351 def _convertparents(ctx, revmap):
352 352 parents = []
353 353 for p in ctx.parents():
354 354 parents.append(revmap[p.node()])
355 355 while len(parents) < 2:
356 356 parents.append(ctx.repo().nullid)
357 357 return parents
358 358
359 359
360 360 # Get memfilectx for a normal file
361 361 def _getnormalcontext(repo, ctx, f, revmap):
362 362 try:
363 363 fctx = ctx.filectx(f)
364 364 except error.LookupError:
365 365 return None
366 366 renamed = fctx.copysource()
367 367
368 368 data = fctx.data()
369 369 if f == b'.hgtags':
370 370 data = _converttags(repo.ui, revmap, data)
371 371 return context.memfilectx(
372 372 repo, ctx, f, data, b'l' in fctx.flags(), b'x' in fctx.flags(), renamed
373 373 )
374 374
375 375
376 376 # Remap tag data using a revision map
377 377 def _converttags(ui, revmap, data):
378 378 newdata = []
379 379 for line in data.splitlines():
380 380 try:
381 381 id, name = line.split(b' ', 1)
382 382 except ValueError:
383 383 ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line)
384 384 continue
385 385 try:
386 386 newid = bin(id)
387 387 except TypeError:
388 388 ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
389 389 continue
390 390 try:
391 391 newdata.append(b'%s %s\n' % (hex(revmap[newid]), name))
392 392 except KeyError:
393 393 ui.warn(_(b'no mapping for id %s\n') % id)
394 394 continue
395 395 return b''.join(newdata)
396 396
397 397
398 398 def _islfile(file, ctx, matcher, size):
399 399 """Return true if file should be considered a largefile, i.e.
400 400 matcher matches it or it is larger than size."""
401 401 # never store special .hg* files as largefiles
402 402 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
403 403 return False
404 404 if matcher and matcher(file):
405 405 return True
406 406 try:
407 407 return ctx.filectx(file).size() >= size * 1024 * 1024
408 408 except error.LookupError:
409 409 return False
410 410
411 411
412 412 def uploadlfiles(ui, rsrc, rdst, files):
413 413 '''upload largefiles to the central store'''
414 414
415 415 if not files:
416 416 return
417 417
418 418 store = storefactory.openstore(rsrc, rdst, put=True)
419 419
420 420 at = 0
421 421 ui.debug(b"sending statlfile command for %d largefiles\n" % len(files))
422 422 retval = store.exists(files)
423 423 files = [h for h in files if not retval[h]]
424 424 ui.debug(b"%d largefiles need to be uploaded\n" % len(files))
425 425
426 426 with ui.makeprogress(
427 427 _(b'uploading largefiles'), unit=_(b'files'), total=len(files)
428 428 ) as progress:
429 429 for hash in files:
430 430 progress.update(at)
431 431 source = lfutil.findfile(rsrc, hash)
432 432 if not source:
433 433 raise error.Abort(
434 434 _(
435 435 b'largefile %s missing from store'
436 436 b' (needs to be uploaded)'
437 437 )
438 438 % hash
439 439 )
440 440 # XXX check for errors here
441 441 store.put(source, hash)
442 442 at += 1
443 443
444 444
445 445 def verifylfiles(ui, repo, all=False, contents=False):
446 446 """Verify that every largefile revision in the current changeset
447 447 exists in the central store. With --contents, also verify that
448 448 the contents of each local largefile file revision are correct (SHA-1 hash
449 449 matches the revision ID). With --all, check every changeset in
450 450 this repository."""
451 451 if all:
452 452 revs = repo.revs(b'all()')
453 453 else:
454 454 revs = [b'.']
455 455
456 456 store = storefactory.openstore(repo)
457 457 return store.verify(revs, contents=contents)
458 458
459 459
460 460 def cachelfiles(ui, repo, node, filelist=None):
461 461 """cachelfiles ensures that all largefiles needed by the specified revision
462 462 are present in the repository's largefile cache.
463 463
464 464 returns a tuple (cached, missing). cached is the list of files downloaded
465 465 by this operation; missing is the list of files that were needed but could
466 466 not be found."""
467 467 lfiles = lfutil.listlfiles(repo, node)
468 468 if filelist:
469 469 lfiles = set(lfiles) & set(filelist)
470 470 toget = []
471 471
472 472 ctx = repo[node]
473 473 for lfile in lfiles:
474 474 try:
475 475 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
476 476 except IOError as err:
477 477 if err.errno == errno.ENOENT:
478 478 continue # node must be None and standin wasn't found in wctx
479 479 raise
480 480 if not lfutil.findfile(repo, expectedhash):
481 481 toget.append((lfile, expectedhash))
482 482
483 483 if toget:
484 484 store = storefactory.openstore(repo)
485 485 ret = store.get(toget)
486 486 return ret
487 487
488 488 return ([], [])
489 489
490 490
491 491 def downloadlfiles(ui, repo):
492 492 tonode = repo.changelog.node
493 493 totalsuccess = 0
494 494 totalmissing = 0
495 495 for rev in repo.revs(b'file(%s)', b'path:' + lfutil.shortname):
496 496 success, missing = cachelfiles(ui, repo, tonode(rev))
497 497 totalsuccess += len(success)
498 498 totalmissing += len(missing)
499 499 ui.status(_(b"%d additional largefiles cached\n") % totalsuccess)
500 500 if totalmissing > 0:
501 501 ui.status(_(b"%d largefiles failed to download\n") % totalmissing)
502 502 return totalsuccess, totalmissing
503 503
504 504
505 505 def updatelfiles(
506 506 ui, repo, filelist=None, printmessage=None, normallookup=False
507 507 ):
508 508 """Update largefiles according to standins in the working directory
509 509
510 510 If ``printmessage`` is other than ``None``, it means "print (or
511 511 ignore, for false) message forcibly".
512 512 """
513 513 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
514 514 with repo.wlock():
515 515 lfdirstate = lfutil.openlfdirstate(ui, repo)
516 516 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
517 517
518 518 if filelist is not None:
519 519 filelist = set(filelist)
520 520 lfiles = [f for f in lfiles if f in filelist]
521 521
522 522 with lfdirstate.parentchange():
523 523 update = {}
524 524 dropped = set()
525 525 updated, removed = 0, 0
526 526 wvfs = repo.wvfs
527 527 wctx = repo[None]
528 528 for lfile in lfiles:
529 529 lfileorig = os.path.relpath(
530 530 scmutil.backuppath(ui, repo, lfile), start=repo.root
531 531 )
532 532 standin = lfutil.standin(lfile)
533 533 standinorig = os.path.relpath(
534 534 scmutil.backuppath(ui, repo, standin), start=repo.root
535 535 )
536 536 if wvfs.exists(standin):
537 537 if wvfs.exists(standinorig) and wvfs.exists(lfile):
538 538 shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
539 539 wvfs.unlinkpath(standinorig)
540 540 expecthash = lfutil.readasstandin(wctx[standin])
541 541 if expecthash != b'':
542 542 if lfile not in wctx: # not switched to normal file
543 543 if repo.dirstate[standin] != b'?':
544 544 wvfs.unlinkpath(lfile, ignoremissing=True)
545 545 else:
546 546 dropped.add(lfile)
547 547
548 548 # use normallookup() to allocate an entry in largefiles
549 549 # dirstate to prevent lfilesrepo.status() from reporting
550 550 # missing files as removed.
551 lfdirstate.normallookup(lfile)
551 lfdirstate.update_file(
552 lfile,
553 p1_tracked=True,
554 wc_tracked=True,
555 possibly_dirty=True,
556 )
552 557 update[lfile] = expecthash
553 558 else:
554 559 # Remove lfiles for which the standin is deleted, unless the
555 560 # lfile is added to the repository again. This happens when a
556 561 # largefile is converted back to a normal file: the standin
557 562 # disappears, but a new (normal) file appears as the lfile.
558 563 if (
559 564 wvfs.exists(lfile)
560 565 and repo.dirstate.normalize(lfile) not in wctx
561 566 ):
562 567 wvfs.unlinkpath(lfile)
563 568 removed += 1
564 569
565 570 # largefile processing might be slow and be interrupted - be prepared
566 571 lfdirstate.write()
567 572
568 573 if lfiles:
569 574 lfiles = [f for f in lfiles if f not in dropped]
570 575
571 576 for f in dropped:
572 577 repo.wvfs.unlinkpath(lfutil.standin(f))
573 578 # This needs to happen for dropped files, otherwise they stay in
574 579 # the M state.
575 580 lfdirstate._drop(f)
576 581
577 582 statuswriter(_(b'getting changed largefiles\n'))
578 583 cachelfiles(ui, repo, None, lfiles)
579 584
580 585 with lfdirstate.parentchange():
581 586 for lfile in lfiles:
582 587 update1 = 0
583 588
584 589 expecthash = update.get(lfile)
585 590 if expecthash:
586 591 if not lfutil.copyfromcache(repo, expecthash, lfile):
587 592 # failed ... but already removed and set to normallookup
588 593 continue
589 594 # Synchronize largefile dirstate to the last modified
590 595 # time of the file
591 596 lfdirstate.update_file(
592 597 lfile, p1_tracked=True, wc_tracked=True
593 598 )
594 599 update1 = 1
595 600
596 601 # copy the exec mode of largefile standin from the repository's
597 602 # dirstate to its state in the lfdirstate.
598 603 standin = lfutil.standin(lfile)
599 604 if wvfs.exists(standin):
600 605 # exec is decided by the users permissions using mask 0o100
601 606 standinexec = wvfs.stat(standin).st_mode & 0o100
602 607 st = wvfs.stat(lfile)
603 608 mode = st.st_mode
604 609 if standinexec != mode & 0o100:
605 610 # first remove all X bits, then shift all R bits to X
606 611 mode &= ~0o111
607 612 if standinexec:
608 613 mode |= (mode >> 2) & 0o111 & ~util.umask
609 614 wvfs.chmod(lfile, mode)
610 615 update1 = 1
611 616
612 617 updated += update1
613 618
614 619 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
615 620
616 621 lfdirstate.write()
617 622 if lfiles:
618 623 statuswriter(
619 624 _(b'%d largefiles updated, %d removed\n') % (updated, removed)
620 625 )
621 626
622 627
623 628 @eh.command(
624 629 b'lfpull',
625 630 [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
626 631 + cmdutil.remoteopts,
627 632 _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
628 633 )
629 634 def lfpull(ui, repo, source=b"default", **opts):
630 635 """pull largefiles for the specified revisions from the specified source
631 636
632 637 Pull largefiles that are referenced from local changesets but missing
633 638 locally, pulling from a remote repository to the local cache.
634 639
635 640 If SOURCE is omitted, the 'default' path will be used.
636 641 See :hg:`help urls` for more information.
637 642
638 643 .. container:: verbose
639 644
640 645 Some examples:
641 646
642 647 - pull largefiles for all branch heads::
643 648
644 649 hg lfpull -r "head() and not closed()"
645 650
646 651 - pull largefiles on the default branch::
647 652
648 653 hg lfpull -r "branch(default)"
649 654 """
650 655 repo.lfpullsource = source
651 656
652 657 revs = opts.get('rev', [])
653 658 if not revs:
654 659 raise error.Abort(_(b'no revisions specified'))
655 660 revs = scmutil.revrange(repo, revs)
656 661
657 662 numcached = 0
658 663 for rev in revs:
659 664 ui.note(_(b'pulling largefiles for revision %d\n') % rev)
660 665 (cached, missing) = cachelfiles(ui, repo, rev)
661 666 numcached += len(cached)
662 667 ui.status(_(b"%d largefiles cached\n") % numcached)
663 668
664 669
665 670 @eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
666 671 def debuglfput(ui, repo, filepath, **kwargs):
667 672 hash = lfutil.hashfile(filepath)
668 673 storefactory.openstore(repo).put(filepath, hash)
669 674 ui.write(b'%s\n' % hash)
670 675 return 0