Minor cleanups.
Bryan O'Sullivan - r1200:333de1d5 default
@@ -356,35 +356,31 @@ else:
 class chunkbuffer(object):
     """Allow arbitrary sized chunks of data to be efficiently read from an
     iterator over chunks of arbitrary size."""
+
     def __init__(self, in_iter, targetsize = 2**16):
         """in_iter is the iterator that's iterating over the input chunks.
         targetsize is how big a buffer to try to maintain."""
         self.in_iter = iter(in_iter)
         self.buf = ''
-        targetsize = int(targetsize)
-        if (targetsize <= 0):
-            raise ValueError("targetsize must be greater than 0, was %d" % targetsize)
-        self.targetsize = int(targetsize)
+        self.targetsize = int(targetsize)
+        if self.targetsize <= 0:
+            raise ValueError("targetsize must be greater than 0, was %d" %
+                             targetsize)
         self.iterempty = False
+
     def fillbuf(self):
-        """x.fillbuf()
-
-        Ignore the target size, and just read every chunk from the iterator
-        until it's empty."""
+        """Ignore target size; read every chunk from iterator until empty."""
         if not self.iterempty:
             collector = cStringIO.StringIO()
             collector.write(self.buf)
             for ch in self.in_iter:
                 collector.write(ch)
             self.buf = collector.getvalue()
-            collector.close()
-            collector = None
             self.iterempty = True
 
     def read(self, l):
-        """x.read(l) -> str
-        Read l bytes of data from the iterator of chunks of data. Returns less
-        than l bytes if the iterator runs dry."""
+        """Read L bytes of data from the iterator of chunks of data.
+        Returns less than L bytes if the iterator runs dry."""
         if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
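Besides validating self.targetsize after the int() conversion and dropping the redundant collector.close() / collector = None pair, this hunk leaves the refill clamp at the end of read() untouched: the buffer is always topped up to a whole multiple of targetsize. A quick sanity check of that arithmetic, with illustrative numbers that are not from the changeset:

    # Illustrative values only: the 2**16 default target and a 70 000-byte request.
    targetsize = 2 ** 16                              # 65536
    l = 70000
    clamped = targetsize * ((l // targetsize) + 1)
    assert clamped == 131072                          # next multiple of 65536 past l

Note that a request which is already an exact multiple still rounds up by a full block: l = 65536 clamps to 131072.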
@@ -399,23 +395,15 @@ class chunkbuffer(object):
             if collected < targetsize:
                 self.iterempty = True
             self.buf = collector.getvalue()
-            collector.close()
-            collector = None
-        s = self.buf[:l]
-        self.buf = buffer(self.buf, l)
+        s, self.buf = self.buf[:l], buffer(self.buf, l)
         return s
-    def __repr__(self):
-        return "<%s.%s targetsize = %u buffered = %u bytes>" % \
-               (self.__class__.__module__, self.__class__.__name__,
-                self.targetsize, len(self.buf))
 
 def filechunkiter(f, size = 65536):
-    """filechunkiter(file[, size]) -> generator
-
-    Create a generator that produces all the data in the file size (default
-    65536) bytes at a time. Chunks may be less than size bytes if the
-    chunk is the last chunk in the file, or the file is a socket or some
-    other type of file that sometimes reads less data than is requested."""
+    """Create a generator that produces all the data in the file size
+    (default 65536) bytes at a time. Chunks may be less than size
+    bytes if the chunk is the last chunk in the file, or the file is a
+    socket or some other type of file that sometimes reads less data
+    than is requested."""
     s = f.read(size)
     while len(s) >= 0:
         yield s
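filechunkiter and chunkbuffer are two halves of the same idea: the first turns a file into an iterator of chunks, the second turns an iterator of chunks back into fixed-size read() calls. Below is a minimal, self-contained sketch of that pairing, for illustration only. It is adapted to Python 3 (io.BytesIO and plain byte slicing stand in for cStringIO and buffer), the filechunkiter loop is finished with an EOF check that the truncated hunk above does not show, and the interior of the read() refill loop (collector, collected counter) is reconstructed from the surrounding context rather than copied from this diff.

    import io

    def filechunkiter(f, size=65536):
        """Yield the file's data in chunks of at most size bytes."""
        s = f.read(size)
        while s:                      # assumed EOF check; the hunk above is cut off here
            yield s
            s = f.read(size)

    class chunkbuffer(object):
        """Reassemble an iterator of arbitrary-sized chunks into fixed-size reads."""
        def __init__(self, in_iter, targetsize=2 ** 16):
            self.in_iter = iter(in_iter)
            self.buf = b''
            self.targetsize = int(targetsize)
            if self.targetsize <= 0:
                raise ValueError("targetsize must be greater than 0, was %d" %
                                 targetsize)
            self.iterempty = False

        def read(self, l):
            """Return up to l bytes; fewer only if the underlying iterator runs dry."""
            if l > len(self.buf) and not self.iterempty:
                # Refill in whole multiples of targetsize, as in the hunks above.
                targetsize = self.targetsize * ((l // self.targetsize) + 1)
                collector = io.BytesIO()
                collector.write(self.buf)
                collected = len(self.buf)
                for chunk in self.in_iter:
                    collector.write(chunk)
                    collected += len(chunk)
                    if collected >= targetsize:
                        break
                if collected < targetsize:
                    self.iterempty = True
                self.buf = collector.getvalue()
            s, self.buf = self.buf[:l], self.buf[l:]
            return s

    # Usage: stream this script through 8 KiB chunks, then read the first 100 bytes back.
    with open(__file__, 'rb') as f:
        cb = chunkbuffer(filechunkiter(f, size=8192))
        piece = cb.read(100)
        print(len(piece), piece[:20])

The original's buffer(self.buf, l) keeps a view over the unread tail instead of copying it on every read(); the plain slice in this sketch trades that away for simplicity.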