util: increase filechunkiter size to 128k...
Mads Kiilerich
r30181:7356e6b1 default

@@ -372,7 +372,7 @@ def hashfile(file):
         return ''
     hasher = hashlib.sha1('')
     with open(file, 'rb') as fd:
-        for data in util.filechunkiter(fd, 128 * 1024):
+        for data in util.filechunkiter(fd):
             hasher.update(data)
     return hasher.hexdigest()
 
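For readers outside the Mercurial tree, the pattern in this hunk is plain incremental hashing: read a bounded chunk, feed it to the hasher, repeat. A minimal standalone sketch using only the standard library (the name sha1_of_file is illustrative, not part of Mercurial):

import hashlib

def sha1_of_file(path, chunksize=128 * 1024):
    # Hash the file incrementally so even very large files are never
    # held in memory at once; 128 KiB mirrors the new filechunkiter
    # default introduced by this changeset.
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        while True:
            data = fd.read(chunksize)
            if not data:
                break
            hasher.update(data)
    return hasher.hexdigest()
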
@@ -1356,7 +1356,7 @@ def overridecat(orig, ui, repo, file1, *
                               'downloaded') % lf)
                 path = lfutil.usercachepath(repo.ui, hash)
                 with open(path, "rb") as fpin:
-                    for chunk in util.filechunkiter(fpin, 128 * 1024):
+                    for chunk in util.filechunkiter(fpin):
                         fp.write(chunk)
         err = 0
     return err
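Outside this codebase, the same bounded-memory copy can be expressed with shutil.copyfileobj, which also streams in fixed-size chunks; a hedged sketch (copy_cached_file is a hypothetical name, not Mercurial API):

import shutil

def copy_cached_file(srcpath, dstfile, chunksize=128 * 1024):
    # Stream srcpath into the already-open dstfile object in
    # chunksize pieces, mirroring the filechunkiter loop above.
    with open(srcpath, 'rb') as fpin:
        shutil.copyfileobj(fpin, dstfile, chunksize)
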
@@ -134,7 +134,7 @@ def wirereposetup(ui, repo):
                                                 length))
 
             # SSH streams will block if reading more than length
-            for chunk in util.filechunkiter(stream, 128 * 1024, length):
+            for chunk in util.filechunkiter(stream, limit=length):
                 yield chunk
             # HTTP streams must hit the end to process the last empty
             # chunk of Chunked-Encoding so the connection can be reused.
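The comment in this hunk is the reason the limit argument exists: on a blocking SSH pipe, asking for more bytes than the peer will ever send hangs the reader, so each read must be capped by the number of bytes still owed. A rough sketch of that pattern, independent of Mercurial's wire protocol code (read_limited is a hypothetical helper):

def read_limited(stream, limit, size=128 * 1024):
    # Yield chunks from a blocking stream without ever requesting
    # more than 'limit' bytes in total; over-reading would block
    # forever because the peer sends exactly 'limit' bytes.
    remaining = limit
    while remaining > 0:
        data = stream.read(min(size, remaining))
        if not data:
            break
        remaining -= len(data)
        yield data
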
@@ -1684,9 +1684,9 @@ class chunkbuffer(object):
 
         return ''.join(buf)
 
-def filechunkiter(f, size=65536, limit=None):
+def filechunkiter(f, size=131072, limit=None):
     """Create a generator that produces the data in the file size
-    (default 65536) bytes at a time, up to optional limit (default is
+    (default 131072) bytes at a time, up to optional limit (default is
     to read all data). Chunks may be less than size bytes if the
     chunk is the last chunk in the file, or the file is a socket or
     some other type of file that sometimes reads less data than is
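The docstring above is the whole contract: fixed-size chunks, an optional byte limit, and short reads passed through unchanged. A minimal generator with the same shape, as an approximation rather than the exact util.py implementation:

def chunkiter(f, size=131072, limit=None):
    # Yield up to 'size' bytes per chunk until EOF, or until 'limit'
    # bytes have been produced when a limit is given.  Short reads
    # (sockets, pipes) are yielded as-is.
    while limit is None or limit > 0:
        want = size if limit is None else min(size, limit)
        data = f.read(want)
        if not data:
            break
        if limit is not None:
            limit -= len(data)
        yield data

With the default arguments this iterates a regular file 128 KiB at a time; with limit set it stops after exactly that many bytes even if the underlying stream has more.
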
@@ -11,7 +11,7 @@ Test how largefiles abort in case the di
   > shutil.copyfileobj = copyfileobj
   > #
   > # this makes the rewritten code abort:
-  > def filechunkiter(f, size=65536, limit=None):
+  > def filechunkiter(f, size=131072, limit=None):
   >     yield f.read(4)
   >     raise IOError(errno.ENOSPC, os.strerror(errno.ENOSPC))
   > util.filechunkiter = filechunkiter