# Changeset r50946:9409f294 (default branch), author: marmoute
# "dirstate: use `dirstate.change_files` to scope the change in `lfconvert`"
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 10
11 11 import binascii
12 12 import os
13 13 import shutil
14 14
15 15 from mercurial.i18n import _
16 16 from mercurial.node import (
17 17 bin,
18 18 hex,
19 19 )
20 20
21 21 from mercurial import (
22 22 cmdutil,
23 23 context,
24 24 error,
25 25 exthelper,
26 26 hg,
27 27 lock,
28 28 logcmdutil,
29 29 match as matchmod,
30 30 pycompat,
31 31 scmutil,
32 32 util,
33 33 )
34 34 from mercurial.utils import hashutil
35 35
36 36 from ..convert import (
37 37 convcmd,
38 38 filemap,
39 39 )
40 40
41 41 from . import lfutil, storefactory
42 42
# Convenience alias: `release(dstlock, dstwlock)` below releases the
# destination repo's locks in order.
release = lock.release

# -- Commands ----------------------------------------------------------

# Extension helper; the @eh.command decorators below register this module's
# commands into its command table.
eh = exthelper.exthelper()
48 48
49 49
@eh.command(
    b'lfconvert',
    [
        (
            b's',
            b'size',
            b'',
            _(b'minimum size (MB) for files to be converted as largefiles'),
            b'SIZE',
        ),
        (
            b'',
            b'to-normal',
            False,
            _(b'convert from a largefiles repo to a normal repo'),
        ),
    ],
    _(b'hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True,
)
def lfconvert(ui, src, dest, *pats, **opts):
    """convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all."""

    opts = pycompat.byteskwargs(opts)
    if opts[b'to_normal']:
        tolfile = False
    else:
        tolfile = True
        # NOTE: `size` is only defined on this branch; the to-normal path
        # never reads it.
        size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_(b'initializing destination %s\n') % dest)
    # The destination is created fresh here and removed again in the
    # `finally` block if the conversion did not succeed.
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (
            rsrc[ctx]
            for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
        )
        # Maps source node -> destination node, seeded with the null revision.
        revmap = {rsrc.nullid: rdst.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, b'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, b'', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            with ui.makeprogress(
                _(b'converting revisions'),
                unit=_(b'revisions'),
                total=rsrc[b'tip'].rev(),
            ) as progress:
                for ctx in ctxs:
                    progress.update(ctx.rev())
                    _lfconvert_addchangeset(
                        rsrc,
                        rdst,
                        ctx,
                        revmap,
                        lfiles,
                        normalfiles,
                        matcher,
                        size,
                        lfiletohash,
                    )

            # Drop the working-copy largefile directory and any largefiles
            # written out during conversion: only the standins belong in
            # the destination's working directory at this point.
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add(b'largefiles')
                scmutil.writereporequirements(rdst)
        else:

            # Source wrapper that substitutes the real largefile content for
            # the hash stored in the standin while converting back to normal.
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = b'.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (
                        not realname.startswith(lfutil.shortnameslash)
                        or f[0] is None
                    ):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(
                            _(b"missing largefile for '%s' in %s")
                            % (realname, realrev)
                        )
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(
                        ui, src, dest, revmapfile, opts
                    )

            # Every largefile must be available locally before we can expand
            # the standins during conversion.
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_(b"all largefiles must be present locally"))

            # Temporarily monkeypatch the convert extension's converter class
            # so it uses our largefile-aware source; restored in `finally`.
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(
                    ui, src, dest, source_type=b'hg', dest_type=b'hg'
                )
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            # Scope the dirstate clear in a `changing_files` context so the
            # dirstate change is properly recorded (see changeset message).
            with rdst.dirstate.changing_files(rdst):
                rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
227 228
228 229
def _lfconvert_addchangeset(
    rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
):
    """Convert one source changeset *ctx* into *rdst*, turning eligible
    files into largefiles.

    ``lfiles``/``normalfiles`` are shared accumulator sets classifying every
    file seen so far; ``lfiletohash`` caches the last written largefile hash
    per file so unchanged standins are not rewritten; ``revmap`` maps source
    nodes to destination nodes and is updated by the commit.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        # Classify the file once, the first time it is encountered.
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.copysource()
                if renamed is None:
                    # the code below assumes renamed to be a boolean or a list
                    # and won't quite work with the value None
                    renamed = False
                renamedlfile = renamed and renamed in lfiles
                islfile |= renamedlfile
                if b'l' in fctx.flags():
                    # Symlinks cannot be largefiles.
                    if renamedlfile:
                        raise error.Abort(
                            _(b'renamed/copied largefile %s becomes symlink')
                            % f
                        )
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            # Largefiles are committed through their standin file.
            fstandin = lfutil.standin(f)
            dstfiles.append(fstandin)
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if b'l' in fctx.flags():
                    renamed = fctx.copysource()
                    if renamed and renamed in lfiles:
                        raise error.Abort(
                            _(b'largefile %s becomes symlink') % f
                        )

                # largefile was modified, update standins
                m = hashutil.sha1(b'')
                m.update(ctx[f].data())
                hash = hex(m.digest())
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = b'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, fstandin, hash, executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # Callback used by memctx: produce file content for *f* in the
        # destination commit (standins get "<hash>\n" as their content).
        srcfname = lfutil.splitstandin(f)
        if srcfname is not None:
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.copysource()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed)

            return context.memfilectx(
                repo,
                memctx,
                f,
                lfiletohash[srcfname] + b'\n',
                b'l' in fctx.flags(),
                b'x' in fctx.flags(),
                renamed,
            )
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
320 321
321 322
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit a converted changeset into *rdst* and record its new node.

    Builds an in-memory changeset mirroring *ctx*'s metadata, commits it,
    copies any new largefiles into the store, makes the new commit the
    working-copy parent, and maps the source node to the new tip in
    *revmap*.
    """
    memctx = context.memctx(
        rdst,
        parents,
        ctx.description(),
        dstfiles,
        getfilectx,
        ctx.user(),
        ctx.date(),
        ctx.extra(),
    )
    newnode = rdst.commitctx(memctx)
    lfutil.copyalltostore(rdst, newnode)
    rdst.setparents(newnode)
    revmap[ctx.node()] = rdst.changelog.tip()
337 338
338 339
339 340 # Generate list of changed files
340 341 def _getchangedfiles(ctx, parents):
341 342 files = set(ctx.files())
342 343 if ctx.repo().nullid not in parents:
343 344 mc = ctx.manifest()
344 345 for pctx in ctx.parents():
345 346 for fn in pctx.manifest().diff(mc):
346 347 files.add(fn)
347 348 return files
348 349
349 350
350 351 # Convert src parents to dst parents
351 352 def _convertparents(ctx, revmap):
352 353 parents = []
353 354 for p in ctx.parents():
354 355 parents.append(revmap[p.node()])
355 356 while len(parents) < 2:
356 357 parents.append(ctx.repo().nullid)
357 358 return parents
358 359
359 360
def _getnormalcontext(repo, ctx, f, revmap):
    """Build a memfilectx for the normal (non-largefile) file *f*, or
    return None if *f* is absent from *ctx* (removed/renamed)."""
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None

    data = fctx.data()
    if f == b'.hgtags':
        # Tag files embed changeset ids; rewrite them for the new repo.
        data = _converttags(repo.ui, revmap, data)
    flags = fctx.flags()
    return context.memfilectx(
        repo,
        ctx,
        f,
        data,
        b'l' in flags,
        b'x' in flags,
        fctx.copysource(),
    )
374 375
375 376
def _converttags(ui, revmap, data):
    """Rewrite a ``.hgtags`` blob, remapping each tagged node through
    *revmap*; malformed or unmapped entries are skipped with a warning."""
    newlines = []
    for line in data.splitlines():
        parts = line.split(b' ', 1)
        if len(parts) != 2:
            ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line)
            continue
        id, name = parts
        try:
            newid = bin(id)
        except binascii.Error:
            ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
            continue
        if newid not in revmap:
            ui.warn(_(b'no mapping for id %s\n') % id)
            continue
        newlines.append(b'%s %s\n' % (hex(revmap[newid]), name))
    return b''.join(newlines)
396 397
397 398
398 399 def _islfile(file, ctx, matcher, size):
399 400 """Return true if file should be considered a largefile, i.e.
400 401 matcher matches it or it is larger than size."""
401 402 # never store special .hg* files as largefiles
402 403 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
403 404 return False
404 405 if matcher and matcher(file):
405 406 return True
406 407 try:
407 408 return ctx.filectx(file).size() >= size * 1024 * 1024
408 409 except error.LookupError:
409 410 return False
410 411
411 412
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = storefactory.openstore(rsrc, rdst, put=True)

    # Ask the store which hashes it already has, then only upload the rest.
    ui.debug(b"sending statlfile command for %d largefiles\n" % len(files))
    present = store.exists(files)
    files = [h for h in files if not present[h]]
    ui.debug(b"%d largefiles need to be uploaded\n" % len(files))

    with ui.makeprogress(
        _(b'uploading largefiles'), unit=_(b'files'), total=len(files)
    ) as progress:
        for pos, hash in enumerate(files):
            progress.update(pos)
            source = lfutil.findfile(rsrc, hash)
            if not source:
                raise error.Abort(
                    _(
                        b'largefile %s missing from store'
                        b' (needs to be uploaded)'
                    )
                    % hash
                )
            # XXX check for errors here
            store.put(source, hash)
443 444
444 445
def verifylfiles(ui, repo, all=False, contents=False):
    """Verify largefile revisions against the central store.

    Checks that every largefile revision in the current changeset (or, with
    ``all``, in every changeset) exists in the store; with ``contents``,
    also verifies that each local largefile's content matches its SHA-1
    revision id.
    """
    revs = repo.revs(b'all()') if all else [b'.']
    store = storefactory.openstore(repo)
    return store.verify(revs, contents=contents)
458 459
459 460
def cachelfiles(ui, repo, node, filelist=None):
    """Ensure all largefiles needed by revision *node* are in the
    repository's largefile cache.

    Returns a tuple ``(cached, missing)``: the files downloaded by this
    call and the files that were needed but could not be found.
    """
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    ctx = repo[node]
    toget = []
    for lfile in lfiles:
        try:
            expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
        except FileNotFoundError:
            continue  # node must be None and standin wasn't found in wctx
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        return ([], [])

    store = storefactory.openstore(repo)
    return store.get(toget)
487 488
488 489
def downloadlfiles(ui, repo):
    """Cache every largefile referenced by any revision in *repo*.

    Returns ``(totalsuccess, totalmissing)`` counts across all revisions.
    """
    node = repo.changelog.node
    cached = missed = 0
    for rev in repo.revs(b'file(%s)', b'path:' + lfutil.shortname):
        got, lost = cachelfiles(ui, repo, node(rev))
        cached += len(got)
        missed += len(lost)
    ui.status(_(b"%d additional largefiles cached\n") % cached)
    if missed:
        ui.status(_(b"%d largefiles failed to download\n") % missed)
    return cached, missed
501 502
502 503
def updatelfiles(
    ui, repo, filelist=None, printmessage=None, normallookup=False
):
    """Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".

    Pass 1 below decides, per largefile, whether it must be fetched,
    dropped, or removed; pass 2 copies content from the cache and syncs
    both dirstates.  The exact statement order matters: the lfdirstate is
    written between passes so an interruption leaves consistent state.
    """
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    # wlock: we mutate the working directory and both dirstates.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}  # lfile -> expected standin hash, for pass 2
        dropped = set()
        updated, removed = 0, 0
        wvfs = repo.wvfs
        wctx = repo[None]
        # --- pass 1: inspect standins, decide what to fetch/drop ---
        for lfile in lfiles:
            lfileorig = os.path.relpath(
                scmutil.backuppath(ui, repo, lfile), start=repo.root
            )
            standin = lfutil.standin(lfile)
            standinorig = os.path.relpath(
                scmutil.backuppath(ui, repo, standin), start=repo.root
            )
            if wvfs.exists(standin):
                if wvfs.exists(standinorig) and wvfs.exists(lfile):
                    # Preserve the existing largefile content as the .orig
                    # backup before it gets overwritten.
                    shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
                    wvfs.unlinkpath(standinorig)
                expecthash = lfutil.readasstandin(wctx[standin])
                if expecthash != b'':
                    if lfile not in wctx:  # not switched to normal file
                        if repo.dirstate.get_entry(standin).any_tracked:
                            wvfs.unlinkpath(lfile, ignoremissing=True)
                        else:
                            dropped.add(lfile)

                    # allocate an entry in largefiles dirstate to prevent
                    # lfilesrepo.status() from reporting missing files as
                    # removed.
                    lfdirstate.hacky_extension_update_file(
                        lfile,
                        p1_tracked=True,
                        wc_tracked=True,
                        possibly_dirty=True,
                    )
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (
                    wvfs.exists(lfile)
                    and repo.dirstate.normalize(lfile) not in wctx
                ):
                    wvfs.unlinkpath(lfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write(repo.currenttransaction())

        if lfiles:
            lfiles = [f for f in lfiles if f not in dropped]

            for f in dropped:
                repo.wvfs.unlinkpath(lfutil.standin(f))
                # This needs to happen for dropped files, otherwise they stay in
                # the M state.
                lfdirstate._map.reset_state(f)

            statuswriter(_(b'getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        # --- pass 2: copy content from the cache, fix modes, sync states ---
        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.hacky_extension_update_file(
                    lfile,
                    p1_tracked=True,
                    wc_tracked=True,
                )
                update1 = 1

            # copy the exec mode of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            standin = lfutil.standin(lfile)
            if wvfs.exists(standin):
                # exec is decided by the users permissions using mask 0o100
                standinexec = wvfs.stat(standin).st_mode & 0o100
                st = wvfs.stat(lfile)
                mode = st.st_mode
                if standinexec != mode & 0o100:
                    # first remove all X bits, then shift all R bits to X
                    mode &= ~0o111
                    if standinexec:
                        mode |= (mode >> 2) & 0o111 & ~util.umask
                    wvfs.chmod(lfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write(repo.currenttransaction())
        if lfiles:
            statuswriter(
                _(b'%d largefiles updated, %d removed\n') % (updated, removed)
            )
624 625
625 626
@eh.command(
    b'lfpull',
    [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
    + cmdutil.remoteopts,
    _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
)
def lfpull(ui, repo, source=b"default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    repo.lfpullsource = source

    wanted = opts.get('rev', [])
    if not wanted:
        raise error.Abort(_(b'no revisions specified'))

    # Fetch per-revision and report the total number of files cached.
    total = 0
    for rev in logcmdutil.revrange(repo, wanted):
        ui.note(_(b'pulling largefiles for revision %d\n') % rev)
        cached, _missing = cachelfiles(ui, repo, rev)
        total += len(cached)
    ui.status(_(b"%d largefiles cached\n") % total)
666 667
667 668
@eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
def debuglfput(ui, repo, filepath, **kwargs):
    # Debug helper: hash the given file, upload it to the configured
    # largefile store, and echo the hash for scripting use.
    digest = lfutil.hashfile(filepath)
    store = storefactory.openstore(repo)
    store.put(filepath, digest)
    ui.write(b'%s\n' % digest)
    return 0
# (end of file)