##// END OF EJS Templates
with: use context manager for wlock in updatelfiles
Bryan O'Sullivan -
r27820:d2e9cc9e default
parent child Browse files
Show More
@@ -1,547 +1,544 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 10
11 11 import os, errno
12 12 import shutil
13 13
14 14 from mercurial import util, match as match_, hg, node, context, error, \
15 15 cmdutil, scmutil, commands
16 16 from mercurial.i18n import _
17 17 from mercurial.lock import release
18 18
19 19 from hgext.convert import convcmd
20 20 from hgext.convert import filemap
21 21
22 22 import lfutil
23 23 import basestore
24 24
25 25 # -- Commands ----------------------------------------------------------
26 26
27 27 cmdtable = {}
28 28 command = cmdutil.command(cmdtable)
29 29
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # size is only consulted on the normal->largefiles path
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    # success stays False until conversion completes; the finally block
    # removes the freshly created destination on any failure.
    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            # lfiletohash maps each converted largefile to its current
            # content hash, shared across all converted changesets
            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            # remove the working-copy largefiles written during conversion
            # (and any directories emptied by their removal)
            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            # largefiles -> normal: drive hgext.convert with a source that
            # substitutes real file data for the standin hashes
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            # every largefile must be cached locally before conversion
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            # temporarily monkey-patch the converter class used by convcmd;
            # restored in the finally below even if convert() aborts
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    """Convert one source changeset ``ctx`` into the destination repo.

    ``lfiles``/``normalfiles`` are shared mutable sets recording each
    file's classification across the whole conversion; ``lfiletohash``
    caches the last written hash per largefile so unchanged standins are
    not rewritten; ``revmap`` is updated with the new node mapping by
    ``_commitcontext``.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        # classify f exactly once; later changesets reuse the sets
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # callback used by memctx to fetch converted file contents
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
250 250
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit one converted changeset into ``rdst``.

    Builds an in-memory changectx mirroring ``ctx``'s metadata, commits
    it, copies any newly written largefiles into the store, and records
    the source-node -> destination-node mapping in ``revmap``.
    """
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(memctx)
    lfutil.copyalltostore(rdst, newnode)
    rdst.setparents(newnode)
    # remember which destination changeset this source node became
    revmap[ctx.node()] = rdst.changelog.tip()
258 258
def _getchangedfiles(ctx, parents):
    """Return the set of files that changed in ``ctx`` (merge-aware)."""
    changed = set(ctx.files())
    if node.nullid not in parents:
        man = ctx.manifest()
        p1man = ctx.parents()[0].manifest()
        p2man = ctx.parents()[1].manifest()
        # files present in either parent but dropped by the merge
        changed |= (set(p1man) | set(p2man)) - set(man)
        # files whose content differs from at least one parent
        changed.update(f for f in man
                       if man[f] != p1man.get(f, None)
                       or man[f] != p2man.get(f, None))
    return changed
271 271
def _convertparents(ctx, revmap):
    """Map ctx's parents through revmap, padded to exactly two nodes."""
    parents = [revmap[p.node()] for p in ctx.parents()]
    # memctx requires two parents; fill with nullid as needed
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
280 280
def _getnormalcontext(repo, ctx, f, revmap):
    """Build a memfilectx for a normal (non-largefile) file.

    Returns None when the file is absent from ``ctx``'s manifest
    (removed or renamed away).
    """
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    copied = fctx.renamed()
    copysource = copied[0] if copied else copied

    flags = fctx.flags()
    data = fctx.data()
    if f == '.hgtags':
        # the tags file references source nodes; rewrite to dst nodes
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in flags,
                              'x' in flags, copysource)
296 296
def _converttags(ui, revmap, data):
    """Rewrite a .hgtags blob so node ids point at converted revisions.

    Malformed lines and ids without a mapping are skipped with a
    warning rather than aborting the conversion.
    """
    newlines = []
    for line in data.splitlines():
        fields = line.split(' ', 1)
        if len(fields) != 2:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                % line)
            continue
        id, name = fields
        try:
            newid = node.bin(id)
        except TypeError:
            # not a valid hex node id
            ui.warn(_('skipping incorrectly formatted id %s\n')
                % id)
            continue
        mapped = revmap.get(newid)
        if mapped is None:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
        newlines.append('%s %s\n' % (node.hex(mapped), name))
    return ''.join(newlines)
320 320
def _islfile(file, ctx, matcher, size):
    '''Decide whether ``file`` should be tracked as a largefile.

    A file qualifies when the matcher matches it, or when its first
    revision is at least ``size`` megabytes. Special .hg* files are
    never converted.'''
    if file in ('.hgtags', '.hgignore', '.hgsigs'):
        return False
    if matcher and matcher(file):
        return True
    try:
        filesize = ctx.filectx(file).size()
    except error.LookupError:
        # file absent from this revision: not a largefile here
        return False
    return filesize >= size * 1024 * 1024
333 333
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    # ask the store which hashes it already has, then upload the rest
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    present = store.exists(files)
    needed = [h for h in files if not present[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(needed))

    for at, hash in enumerate(needed):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(needed))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise error.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
    ui.progress(_('uploading largefiles'), None)
359 359
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    revs = repo.revs('all()') if all else ['.']
    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
373 373
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    ctx = repo[node]
    for lfile in lfiles:
        try:
            expectedhash = ctx[lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # node must be None and standin wasn't found in wctx
            continue
        # only fetch hashes not already present in the local cache
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        return ([], [])
    store = basestore._openstore(repo)
    return store.get(toget)
402 402
def downloadlfiles(ui, repo, rev=None):
    """Cache the largefiles referenced by the given revisions.

    Returns a (cached, missing) pair of counts over all walked
    changesets.
    """
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    numcached = 0
    nummissing = 0
    # walkchangerevs on an empty rev list would return all revs, so guard
    if rev != []:
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            cached, missing = cachelfiles(ui, repo, ctx.node())
            numcached += len(cached)
            nummissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % numcached)
    if nummissing > 0:
        ui.status(_("%d largefiles failed to download\n") % nummissing)
    return numcached, nummissing
420 420
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    # Hold the wlock for the whole update; the context manager guarantees
    # the lock is released even if an exception escapes (the superseded
    # form used a manual try/finally with wlock.release()).
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        # update maps lfile -> expected hash for files whose standin exists
        update = {}
        updated, removed = 0, 0
        for lfile in lfiles:
            abslfile = repo.wjoin(lfile)
            abslfileorig = scmutil.origpath(ui, repo, abslfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            absstandinorig = scmutil.origpath(ui, repo, absstandin)
            if os.path.exists(absstandin):
                if (os.path.exists(absstandinorig) and
                    os.path.exists(abslfile)):
                    # a standin .orig means the largefile itself had a
                    # conflict; preserve the local largefile as .orig
                    shutil.copyfile(abslfile, abslfileorig)
                    util.unlinkpath(absstandinorig)
                expecthash = lfutil.readstandin(repo, lfile)
                if expecthash != '':
                    if lfile not in repo[None]: # not switched to normal file
                        util.unlinkpath(abslfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                    repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the state of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                # propagate the standin's mode (e.g. exec bit) to the lfile
                mode = os.stat(absstandin).st_mode
                if mode != os.stat(abslfile).st_mode:
                    os.chmod(abslfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
509 506
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull largefiles from for this operation
    repo.lfpullsource = source

    revspec = opts.get('rev', [])
    if not revspec:
        raise error.Abort(_('no revisions specified'))
    resolved = scmutil.revrange(repo, revspec)

    numcached = 0
    for rev in resolved:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
General Comments 0
You need to be logged in to leave comments. Login now