##// END OF EJS Templates
largefiles: rename match_ to matchmod import in lfcommands
Author: liscju
Changeset r29317:5ec25534 (branch: default)
parent child Browse files
Show More
@@ -1,568 +1,568 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 10 from __future__ import absolute_import
11 11
12 12 import errno
13 13 import os
14 14 import shutil
15 15
16 16 from mercurial.i18n import _
17 17
18 18 from mercurial import (
19 19 cmdutil,
20 20 commands,
21 21 context,
22 22 error,
23 23 hg,
24 24 lock,
25 match as match_,
25 match as matchmod,
26 26 node,
27 27 scmutil,
28 28 util,
29 29 )
30 30
31 31 from ..convert import (
32 32 convcmd,
33 33 filemap,
34 34 )
35 35
36 36 from . import (
37 37 lfutil,
38 38 storefactory
39 39 )
40 40
41 41 release = lock.release
42 42
43 43 # -- Commands ----------------------------------------------------------
44 44
45 45 cmdtable = {}
46 46 command = cmdutil.command(cmdtable)
47 47
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # the size threshold only matters when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        # maps source nodes -> destination nodes; seeded with the null
        # revision mapping to itself so root changesets convert cleanly
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted
            # to. Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = matchmod.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revisions'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # conversion wrote plain largefile copies into the destination
            # working directory (via wwrite); remove them so DEST starts
            # with a clean checkout
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    # directory not empty (or already gone) -- leave it
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            # largefiles -> normal: drive the conversion through the convert
            # extension, substituting real file contents for standin hashes
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    # strip the .hglf/ prefix so standins land at the
                    # original file paths in the destination
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            # every largefile must be present locally before conversion,
            # since lfsource reads their contents from the local store
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                # always restore the monkey-patched converter class
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    """Convert one source changeset ``ctx`` and commit it into ``rdst``.

    Mutates its accumulator arguments across calls: ``lfiles`` and
    ``normalfiles`` (classification of paths seen so far), ``lfiletohash``
    (last written standin hash per largefile) and, via _commitcontext,
    ``revmap`` (source node -> destination node).
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        # classify each path the first time it is encountered
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            # the destination tracks the standin, not the file itself
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # write the real content to the working dir (cleaned up
                    # by lfconvert afterwards) and record the hash standin
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # callback used by memctx to supply file contents at commit time
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            # standin content is the largefile's hash plus a newline
            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit a converted changeset into rdst and record the node mapping."""
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(memctx)
    # make sure any largefile standins committed here reach the store too
    lfutil.copyalltostore(rdst, newnode)
    rdst.setparents(newnode)
    # remember src node -> dst node so children can convert their parents
    revmap[ctx.node()] = rdst.changelog.tip()
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    """Return the set of files that differ in ctx relative to its parents.

    For merges (two non-null parents), ctx.files() alone is not enough:
    also include files present in only one side and files whose content
    differs from either parent manifest.
    """
    changed = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        # files that exist in a parent but vanished in the merge result
        changed.update((set(mp1) | set(mp2)) - set(mc))
        # files whose node differs from at least one parent
        changed.update(f for f in mc
                       if mc[f] != mp1.get(f, None) or
                          mc[f] != mp2.get(f, None))
    return changed
289 289
290 290 # Convert src parents to dst parents
291 291 def _convertparents(ctx, revmap):
292 292 parents = []
293 293 for p in ctx.parents():
294 294 parents.append(revmap[p.node()])
295 295 while len(parents) < 2:
296 296 parents.append(node.nullid)
297 297 return parents
298 298
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    """Build a memfilectx for a non-largefile, or None if f is absent."""
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        # file not in this revision: removed or renamed away
        return None
    copysource = fctx.renamed()
    if copysource:
        copysource = copysource[0]

    data = fctx.data()
    if f == '.hgtags':
        # tag entries embed source node ids; rewrite them for the new repo
        data = _converttags (repo.ui, revmap, data)
    flags = fctx.flags()
    return context.memfilectx(repo, f, data, 'l' in flags,
                              'x' in flags, copysource)
314 314
315 315 # Remap tag data using a revision map
316 316 def _converttags(ui, revmap, data):
317 317 newdata = []
318 318 for line in data.splitlines():
319 319 try:
320 320 id, name = line.split(' ', 1)
321 321 except ValueError:
322 322 ui.warn(_('skipping incorrectly formatted tag %s\n')
323 323 % line)
324 324 continue
325 325 try:
326 326 newid = node.bin(id)
327 327 except TypeError:
328 328 ui.warn(_('skipping incorrectly formatted id %s\n')
329 329 % id)
330 330 continue
331 331 try:
332 332 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
333 333 name))
334 334 except KeyError:
335 335 ui.warn(_('no mapping for id %s\n') % id)
336 336 continue
337 337 return ''.join(newdata)
338 338
339 339 def _islfile(file, ctx, matcher, size):
340 340 '''Return true if file should be considered a largefile, i.e.
341 341 matcher matches it or it is larger than size.'''
342 342 # never store special .hg* files as largefiles
343 343 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
344 344 return False
345 345 if matcher and matcher(file):
346 346 return True
347 347 try:
348 348 return ctx.filectx(file).size() >= size * 1024 * 1024
349 349 except error.LookupError:
350 350 return False
351 351
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store

    ``files`` is a list of largefile hashes; hashes already present in
    the remote store are skipped. Aborts if a needed largefile is not
    available in the local store.
    '''

    if not files:
        return

    store = storefactory._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # use a list comprehension instead of filter(): on Python 3 filter()
    # returns a lazy iterator that has no len() and would be exhausted
    # after the first use, breaking the debug message and progress total
    files = [h for h in files if not retval[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit=_('files'),
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise error.Abort(_('largefile %s missing from store'
                                ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
377 377
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    # '.' limits verification to the working directory parent
    revs = repo.revs('all()') if all else ['.']

    store = storefactory._openstore(repo)
    return store.verify(revs, contents=contents)
391 391
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # node must be None and standin wasn't found in wctx
            continue
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        # everything already present in the local cache
        return ([], [])
    store = storefactory._openstore(repo)
    return store.get(toget)
420 420
def downloadlfiles(ui, repo, rev=None):
    """Fetch largefiles referenced by the given revisions into the cache.

    Returns (totalsuccess, totalmissing) counts accumulated over all
    visited revisions.
    """
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass

    cached = 0
    missed = 0
    # walkchangerevs on an empty rev list would visit every revision
    if rev != []:
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev': rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            cached += len(success)
            missed += len(missing)
    ui.status(_("%d additional largefiles cached\n") % cached)
    if missed > 0:
        ui.status(_("%d largefiles failed to download\n") % missed)
    return cached, missed
438 438
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".

    Two passes under the repo wlock: first reconcile the working copy
    with the standins (recording files to fetch and removing stale
    copies), then copy fetched contents out of the cache and sync the
    largefiles dirstate.
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # consider both currently-tracked largefiles and any leftovers
        # already recorded in the largefiles dirstate
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}  # lfile -> expected hash, filled in the first pass
        updated, removed = 0, 0
        wvfs = repo.wvfs
        for lfile in lfiles:
            rellfile = lfile
            rellfileorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(rellfile)),
                start=repo.root)
            relstandin = lfutil.standin(lfile)
            relstandinorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(relstandin)),
                start=repo.root)
            if wvfs.exists(relstandin):
                # a standin .orig implies the largefile itself was backed
                # up; move the backup to the largefile's .orig path
                if (wvfs.exists(relstandinorig) and
                        wvfs.exists(rellfile)):
                    shutil.copyfile(wvfs.join(rellfile),
                                    wvfs.join(rellfileorig))
                    wvfs.unlinkpath(relstandinorig)
                expecthash = lfutil.readstandin(repo, lfile)
                if expecthash != '':
                    if lfile not in repo[None]: # not switched to normal file
                        wvfs.unlinkpath(rellfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (wvfs.exists(rellfile) and
                        repo.dirstate.normalize(lfile) not in repo[None]):
                    wvfs.unlinkpath(rellfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            # fetch everything recorded in `update` into the local cache
            cachelfiles(ui, repo, None, lfiles)

        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the state of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            rellfile = lfile
            relstandin = lfutil.standin(lfile)
            if wvfs.exists(relstandin):
                mode = wvfs.stat(relstandin).st_mode
                if mode != wvfs.stat(rellfile).st_mode:
                    wvfs.chmod(rellfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
530 530
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull from, for use by the store machinery
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    total = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        total += len(cached)
    ui.status(_("%d largefiles cached\n") % total)
General Comments 0
You need to be logged in to leave comments. Login now