Handling BlockingIOError when writing a gzip stream in Python 3
I want to create a gzip-compressed stream using the Python 3 io libraries. After compression I want to split the stream into arbitrarily sized chunks (they will later be concatenated, so the cuts do not need to respect compression block boundaries).
However, I don't understand how to handle the BlockingIOError, or why the BufferedWriter calls my write method right after GzipFile writes its header, resulting in a 10-byte call instead of a full buffer_size buffer.
#!/usr/local/bin/python3
import sys
import io
import gzip

class MyBuf(io.RawIOBase):
    def write(self, b):
        print("we got a chunk of length %s" % len(b))

    def writable(self):
        return True

um = io.BufferedWriter(MyBuf(), buffer_size=1024)
outf = gzip.GzipFile(fileobj=um, mode='w')
bw = io.TextIOWrapper(outf)

wrapped_stdin = io.TextIOWrapper(io.BufferedReader(sys.stdin.buffer))
for line in wrapped_stdin:
    l = line.rstrip()
    try:
        bw.write(l)
    except BlockingIOError:
        print("why is this happening?")

um.flush()  # raises BlockingIOError
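For comparison, here is a minimal sketch of the same pipeline in which the raw sink's write() returns the number of bytes it consumed, as io.RawIOBase.write() is documented to do (the ChunkSink name is just illustrative); as far as I can tell, this is what keeps BufferedWriter from raising BlockingIOError:

#!/usr/local/bin/python3
import sys
import io
import gzip

class ChunkSink(io.RawIOBase):
    # Illustrative sink: receives each chunk the BufferedWriter flushes.
    def write(self, b):
        print("we got a chunk of length %s" % len(b))
        # Returning None makes BufferedWriter treat this as a
        # non-blocking stream that could not accept data, hence the
        # BlockingIOError; report the byte count instead.
        return len(b)

    def writable(self):
        return True

um = io.BufferedWriter(ChunkSink(), buffer_size=1024)
outf = gzip.GzipFile(fileobj=um, mode='w')
bw = io.TextIOWrapper(outf)

for line in sys.stdin:
    bw.write(line.rstrip())

bw.close()   # flushes the text layer and writes the gzip trailer
um.flush()   # pushes any remaining buffered bytes to ChunkSink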