largefiles: use context for file closing...
Mads Kiilerich
r30142:3dcaf1c4 default
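The change replaces manual close() calls (and the try/finally blocks guarding them) with `with` statements, so the files are released even when an exception escapes the block. A minimal sketch of the before/after pattern, using the plain built-in open() rather than any Mercurial API:

# Before: the destination file can stay open if a read or write raises
# and the caller forgot the try/finally.
def copy_manually(src, dest):
    srcf = open(src, 'rb')
    destf = open(dest, 'wb')
    try:
        destf.write(srcf.read())
    finally:
        destf.close()
        srcf.close()

# After: the with statement closes both files on normal exit and on
# exceptions, with less boilerplate.
def copy_with_context(src, dest):
    with open(src, 'rb') as srcf:
        with open(dest, 'wb') as destf:
            destf.write(srcf.read())

The same transformation is applied below to util.atomictempfile, repo.vfs, cmdutil.makefileobj and lfutil.httpsendfile; the last two gain __enter__/__exit__ in this changeset so that they can be used this way.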
@@ -91,15 +91,13 @@ class basestore(object):
         storefilename = lfutil.storepath(self.repo, hash)
 
         tmpname = storefilename + '.tmp'
-        tmpfile = util.atomictempfile(tmpname,
-                                      createmode=self.repo.store.createmode)
-
-        try:
-            gothash = self._getfile(tmpfile, filename, hash)
-        except StoreError as err:
-            self.ui.warn(err.longmessage())
-            gothash = ""
-        tmpfile.close()
+        with util.atomictempfile(tmpname,
+                createmode=self.repo.store.createmode) as tmpfile:
+            try:
+                gothash = self._getfile(tmpfile, filename, hash)
+            except StoreError as err:
+                self.ui.warn(err.longmessage())
+                gothash = ""
 
         if gothash != hash:
             if gothash != "":
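For this hunk to work, util.atomictempfile has to support the context manager protocol. The sketch below is not Mercurial's implementation; it is a simplified, hypothetical stand-in that illustrates the semantics being relied on: write to a temporary file and only move it into place if the block completes without an exception.

import os
import tempfile

class atomicfile(object):
    """Illustrative stand-in for util.atomictempfile (hypothetical)."""
    def __init__(self, name, mode=0o666):
        self._name = name
        fd, self._tempname = tempfile.mkstemp(
            prefix='.%s-' % os.path.basename(name),
            dir=os.path.dirname(name) or '.')
        os.chmod(self._tempname, mode)
        self._fp = os.fdopen(fd, 'wb')

    def write(self, data):
        self._fp.write(data)

    def close(self):
        # Normal completion: publish the temp file atomically.
        self._fp.close()
        os.rename(self._tempname, self._name)

    def discard(self):
        # Failure path: drop the temp file, leave the target untouched.
        self._fp.close()
        os.unlink(self._tempname)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        if exc_type is None:
            self.close()
        else:
            self.discard()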
@@ -54,10 +54,10 @@ def link(src, dest):
         util.oslink(src, dest)
     except OSError:
         # if hardlinks fail, fallback on atomic copy
-        dst = util.atomictempfile(dest)
-        for chunk in util.filechunkiter(open(src, 'rb')):
-            dst.write(chunk)
-        dst.close()
+        with open(src, 'rb') as srcf:
+            with util.atomictempfile(dest) as dstf:
+                for chunk in util.filechunkiter(srcf):
+                    dstf.write(chunk)
         os.chmod(dest, os.stat(src).st_mode)
 
 def usercachepath(ui, hash):
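The copy fallback streams the source through util.filechunkiter instead of reading it into memory at once, since largefiles can be arbitrarily big. A rough, self-contained equivalent of such a chunk iterator, assuming a 128 KiB chunk size (Mercurial's own default may differ):

def filechunks(fp, size=128 * 1024):
    """Yield successive chunks read from the file object fp."""
    while True:
        chunk = fp.read(size)
        if not chunk:
            break
        yield chunk

# Streaming copy that closes both files even if a read or write fails.
def stream_copy(src, dest):
    with open(src, 'rb') as srcf:
        with open(dest, 'wb') as destf:
            for chunk in filechunks(srcf):
                destf.write(chunk)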
@@ -264,11 +264,11 @@ def copytostoreabsolute(repo, file, hash
         link(usercachepath(repo.ui, hash), storepath(repo, hash))
     else:
         util.makedirs(os.path.dirname(storepath(repo, hash)))
-        dst = util.atomictempfile(storepath(repo, hash),
-                                  createmode=repo.store.createmode)
-        for chunk in util.filechunkiter(open(file, 'rb')):
-            dst.write(chunk)
-        dst.close()
+        with open(file, 'rb') as srcf:
+            with util.atomictempfile(storepath(repo, hash),
+                                     createmode=repo.store.createmode) as dstf:
+                for chunk in util.filechunkiter(srcf):
+                    dstf.write(chunk)
     linktousercache(repo, hash)
 
 def linktousercache(repo, hash):
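Both copy sites nest one `with` inside another rather than using the one-line `with a, b:` form. A likely reason is that the one-line form requires Python 2.7, which Mercurial could not yet assume at the time; this is an inference, not stated in the changeset. For comparison, a sketch of the one-line alternative using plain open():

# Equivalent to the nested form in the diff (Python 2.7+ / 3.x only):
def copy_single_with(src, dest):
    with open(src, 'rb') as srcf, open(dest, 'wb') as destf:
        destf.write(srcf.read())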
@@ -370,10 +370,9 @@ def hashfile(file):
     if not os.path.exists(file):
         return ''
     hasher = hashlib.sha1('')
-    fd = open(file, 'rb')
-    for data in util.filechunkiter(fd, 128 * 1024):
-        hasher.update(data)
-    fd.close()
+    with open(file, 'rb') as fd:
+        for data in util.filechunkiter(fd, 128 * 1024):
+            hasher.update(data)
     return hasher.hexdigest()
 
 def getexecutable(filename):
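hashfile now reads the file in 128 KiB chunks under a `with` block and feeds each chunk to the hasher, so the whole largefile never sits in memory. A standalone sketch of the same idea with hashlib (note that the `hashlib.sha1('')` seeding above is Python 2 style; on Python 3 the argument would have to be bytes):

import hashlib
import os

def sha1_of_file(path, chunksize=128 * 1024):
    """Return the hex SHA-1 of a file, or '' if it does not exist."""
    if not os.path.exists(path):
        return ''
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunksize), b''):
            hasher.update(chunk)
    return hasher.hexdigest()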
@@ -883,11 +883,8 @@ def hgclone(orig, ui, opts, *args, **kwa
 
     # If largefiles is required for this repo, permanently enable it locally
    if 'largefiles' in repo.requirements:
-        fp = repo.vfs('hgrc', 'a', text=True)
-        try:
-            fp.write('\n[extensions]\nlargefiles=\n')
-        finally:
-            fp.close()
+        with repo.vfs('hgrc', 'a', text=True) as fp:
+            fp.write('\n[extensions]\nlargefiles=\n')
 
     # Caching is implicitly limited to 'rev' option, since the dest repo was
     # truncated at that point. The user may expect a download count with
@@ -1339,30 +1336,28 @@ def overridecat(orig, ui, repo, file1, *
     m.visitdir = lfvisitdirfn
 
     for f in ctx.walk(m):
-        fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
-                                 pathname=f)
-        lf = lfutil.splitstandin(f)
-        if lf is None or origmatchfn(f):
-            # duplicating unreachable code from commands.cat
-            data = ctx[f].data()
-            if opts.get('decode'):
-                data = repo.wwritedata(f, data)
-            fp.write(data)
-        else:
-            hash = lfutil.readstandin(repo, lf, ctx.rev())
-            if not lfutil.inusercache(repo.ui, hash):
-                store = storefactory.openstore(repo)
-                success, missing = store.get([(lf, hash)])
-                if len(success) != 1:
-                    raise error.Abort(
-                        _('largefile %s is not in cache and could not be '
-                          'downloaded') % lf)
-            path = lfutil.usercachepath(repo.ui, hash)
-            fpin = open(path, "rb")
-            for chunk in util.filechunkiter(fpin, 128 * 1024):
-                fp.write(chunk)
-            fpin.close()
-        fp.close()
+        with cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
+                                 pathname=f) as fp:
+            lf = lfutil.splitstandin(f)
+            if lf is None or origmatchfn(f):
+                # duplicating unreachable code from commands.cat
+                data = ctx[f].data()
+                if opts.get('decode'):
+                    data = repo.wwritedata(f, data)
+                fp.write(data)
+            else:
+                hash = lfutil.readstandin(repo, lf, ctx.rev())
+                if not lfutil.inusercache(repo.ui, hash):
+                    store = storefactory.openstore(repo)
+                    success, missing = store.get([(lf, hash)])
+                    if len(success) != 1:
+                        raise error.Abort(
+                            _('largefile %s is not in cache and could not be '
+                              'downloaded') % lf)
+                path = lfutil.usercachepath(repo.ui, hash)
+                with open(path, "rb") as fpin:
+                    for chunk in util.filechunkiter(fpin, 128 * 1024):
+                        fp.write(chunk)
         err = 0
     return err
 
@@ -45,17 +45,13 @@ class remotestore(basestore.basestore):
 
     def sendfile(self, filename, hash):
         self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
-        fd = None
         try:
-            fd = lfutil.httpsendfile(self.ui, filename)
-            return self._put(hash, fd)
+            with lfutil.httpsendfile(self.ui, filename) as fd:
+                return self._put(hash, fd)
         except IOError as e:
             raise error.Abort(
                 _('remotestore: could not open file %s: %s')
                 % (filename, str(e)))
 
-        finally:
-            if fd:
-                fd.close()
-
     def _getfile(self, tmpfile, filename, hash):
         try:
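In sendfile the `with` block sits inside the existing try/except IOError. That works because __exit__ closes the upload file but does not swallow the exception; the IOError still reaches the handler afterwards. A small illustration of that ordering, with hypothetical names:

class tracedfile(object):
    """Hypothetical file wrapper that records when it is closed."""
    def __init__(self, path):
        self._fp = open(path, 'rb')
        self.closed = False

    def read(self, n=-1):
        return self._fp.read(n)

    def close(self):
        self._fp.close()
        self.closed = True

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()    # runs before the except clause below
        return None     # a falsy return value re-raises the exception

def upload(path):
    try:
        with tracedfile(path) as fd:
            raise IOError('simulated network failure')
    except IOError as e:
        # By the time we get here the file has already been closed.
        print('upload failed: %s' % e)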
@@ -499,6 +499,12 @@ class _unclosablefile(object):
     def __getattr__(self, attr):
         return getattr(self._fp, attr)
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        pass
+
 def makefileobj(repo, pat, node=None, desc=None, total=None,
                 seqno=None, revwidth=None, mode='wb', modemap=None,
                 pathname=None):
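_unclosablefile is what makefileobj hands back when output goes to standard output (presumably wrapping the ui's stdout stream), so overridecat's new `with ... as fp:` must not actually close the stream; hence the no-op __exit__ above. A minimal sketch of the same idea, not the cmdutil code itself:

import sys

class unclosable(object):
    """Proxy that ignores close() so 'with' blocks cannot close stdout."""
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        pass

# Writing through the wrapper leaves the real stream usable afterwards.
with unclosable(sys.stdout) as out:
    out.write('hello\n')
sys.stdout.write('still open\n')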
@@ -58,6 +58,12 @@ class httpsendfile(object):
                          unit=_('kb'), total=self._total)
         return ret
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
 # moved here from url.py to avoid a cycle
 def readauthforuri(ui, uri, user):
     # Read configuration
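Unlike _unclosablefile, httpsendfile owns its underlying file, so its __exit__ really closes it; that is what lets remotestore.sendfile above use it in a `with` block. An alternative, when a file-like class cannot be modified, is the standard library's contextlib.closing, which supplies __enter__/__exit__ around any object exposing close(); adding the methods directly, as done here, keeps call sites free of that extra wrapper. A brief sketch of the alternative, with a hypothetical class:

import contextlib

class legacyfile(object):
    """Hypothetical file-like wrapper that only offers close()."""
    def __init__(self, path):
        self._fp = open(path, 'rb')

    def read(self, n=-1):
        return self._fp.read(n)

    def close(self):
        self._fp.close()

# contextlib.closing supplies the context manager protocol externally.
def read_all(path):
    with contextlib.closing(legacyfile(path)) as fd:
        return fd.read()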