Show More
@@ -888,21 +888,7 b' class localrepository:' | |||||
888 | return remote.addchangegroup(cg) |
|
888 | return remote.addchangegroup(cg) | |
889 |
|
889 | |||
890 | def changegroup(self, basenodes): |
|
890 | def changegroup(self, basenodes): | |
891 | class genread: |
|
891 | genread = util.chunkbuffer | |
892 | def __init__(self, generator): |
|
|||
893 | self.g = generator |
|
|||
894 | self.buf = "" |
|
|||
895 | def fillbuf(self): |
|
|||
896 | self.buf += "".join(self.g) |
|
|||
897 |
|
||||
898 | def read(self, l): |
|
|||
899 | while l > len(self.buf): |
|
|||
900 | try: |
|
|||
901 | self.buf += self.g.next() |
|
|||
902 | except StopIteration: |
|
|||
903 | break |
|
|||
904 | d, self.buf = self.buf[:l], self.buf[l:] |
|
|||
905 | return d |
|
|||
906 |
|
892 | |||
907 | def gengroup(): |
|
893 | def gengroup(): | |
908 | nodes = self.newer(basenodes) |
|
894 | nodes = self.newer(basenodes) |
@@ -12,7 +12,7 b' platform-specific details from the core.' | |||||
12 |
|
12 | |||
13 | import os, errno |
|
13 | import os, errno | |
14 | from demandload import * |
|
14 | from demandload import * | |
15 | demandload(globals(), "re") |
|
15 | demandload(globals(), "re cStringIO") | |
16 |
|
16 | |||
17 | def binary(s): |
|
17 | def binary(s): | |
18 | """return true if a string is binary data using diff's heuristic""" |
|
18 | """return true if a string is binary data using diff's heuristic""" | |
@@ -352,3 +352,71 b' else:' | |||||
352 | val = os.WSTOPSIG(code) |
|
352 | val = os.WSTOPSIG(code) | |
353 | return "stopped by signal %d" % val, val |
|
353 | return "stopped by signal %d" % val, val | |
354 | raise ValueError("invalid exit code") |
|
354 | raise ValueError("invalid exit code") | |
|
355 | ||||
|
class chunkbuffer(object):
    """Buffer an iterator of string chunks so that callers can read
    arbitrary byte counts from it.

    The producer may yield chunks of any size; read(l) hands back
    exactly l bytes until the underlying iterator runs dry."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter yields the input chunks; targetsize is how large a
        buffer to aim for when refilling."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        targetsize = int(targetsize)
        if targetsize <= 0:
            raise ValueError("targetsize must be greater than 0, was %d" % targetsize)
        self.targetsize = int(targetsize)
        self.iterempty = False

    def fillbuf(self):
        """x.fillbuf()

        Drain every remaining chunk from the iterator into the buffer,
        ignoring the target size."""
        if self.iterempty:
            return
        sink = cStringIO.StringIO()
        sink.write(self.buf)
        for piece in self.in_iter:
            sink.write(piece)
        self.buf = sink.getvalue()
        sink.close()
        sink = None
        self.iterempty = True

    def read(self, l):
        """x.read(l) -> str
        Read l bytes of data from the iterator of chunks of data.
        Returns less than l bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Refill up to the next multiple of targetsize past l.
            want = self.targetsize * ((l // self.targetsize) + 1)
            sink = cStringIO.StringIO()
            sink.write(self.buf)
            have = len(self.buf)
            for piece in self.in_iter:
                sink.write(piece)
                have += len(piece)
                if have >= want:
                    break
            # Falling short of the target means the iterator ended.
            if have < want:
                self.iterempty = True
            self.buf = sink.getvalue()
            sink.close()
            sink = None
        d = self.buf[:l]
        # buffer() gives a zero-copy view past the consumed prefix.
        self.buf = buffer(self.buf, l)
        return d

    def __repr__(self):
        return "<%s.%s targetsize = %u buffered = %u bytes>" % \
               (self.__class__.__module__, self.__class__.__name__,
                self.targetsize, len(self.buf))
|
411 | ||||
|
def filechunkiter(f, size = 65536):
    """filechunkiter(file[, size]) -> generator

    Create a generator that produces all the data in the file size
    (default 65536) bytes at a time.  Chunks may be less than size
    bytes if the chunk is the last chunk in the file, or the file is a
    socket or some other type of file that sometimes reads less data
    than is requested."""
    s = f.read(size)
    # An empty read means EOF.  The original condition 'len(s) >= 0'
    # could never be false, so the loop yielded '' forever instead of
    # terminating; test for a non-empty chunk instead.
    while len(s) > 0:
        yield s
        s = f.read(size)
General Comments 0
You need to be logged in to leave comments.
Login now