zenfenan commented on a change in pull request #11307: [FLINK-16371] [BulkWriter] Fix Hadoop Compression BulkWriter
URL: https://github.com/apache/flink/pull/11307#discussion_r390087719
##########
File path: flink-formats/flink-compress/src/main/java/org/apache/flink/formats/compress/writers/HadoopCompressionBulkWriter.java
##########
@@ -19,48 +19,40 @@
 package org.apache.flink.formats.compress.writers;

 import org.apache.flink.api.common.serialization.BulkWriter;
-import org.apache.flink.core.fs.FSDataOutputStream;
 import org.apache.flink.formats.compress.extractor.Extractor;

-import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionOutputStream;

 import java.io.IOException;

 /**
- * A {@link BulkWriter} implementation that compresses data using Hadoop codecs.
+ * A {@link BulkWriter} implementation that writes data that have been
+ * compressed using Hadoop {@link org.apache.hadoop.io.compress.CompressionCodec}.
  *
  * @param <T> The type of element to write.
  */
 public class HadoopCompressionBulkWriter<T> implements BulkWriter<T> {

 	private Extractor<T> extractor;
-	private FSDataOutputStream outputStream;
-	private CompressionOutputStream compressor;
+	private CompressionOutputStream out;

-	public HadoopCompressionBulkWriter(
-			FSDataOutputStream outputStream,
-			Extractor<T> extractor,
-			CompressionCodec compressionCodec) throws Exception {
-		this.outputStream = outputStream;
+	public HadoopCompressionBulkWriter(CompressionOutputStream out, Extractor<T> extractor) {
+		this.out = out;

Review comment:
   Done.
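For readers following the change outside of GitHub: the hunk above only covers the imports, Javadoc, fields, and constructor. Below is a minimal sketch of how the full refactored writer could look once the BulkWriter methods delegate to the injected CompressionOutputStream. The method bodies are assumptions inferred from the constructor change, not code taken from the PR, and the sketch assumes Extractor#extract returns the element's serialized bytes.

    package org.apache.flink.formats.compress.writers;

    import org.apache.flink.api.common.serialization.BulkWriter;
    import org.apache.flink.formats.compress.extractor.Extractor;

    import org.apache.hadoop.io.compress.CompressionOutputStream;

    import java.io.IOException;

    /**
     * Sketch: writes elements that are compressed by the Hadoop
     * CompressionOutputStream handed in by the caller.
     */
    public class HadoopCompressionBulkWriter<T> implements BulkWriter<T> {

        private final Extractor<T> extractor;
        private final CompressionOutputStream out;

        public HadoopCompressionBulkWriter(CompressionOutputStream out, Extractor<T> extractor) {
            this.out = out;
            this.extractor = extractor;
        }

        @Override
        public void addElement(T element) throws IOException {
            // Serialize the element and hand the bytes to the compressed stream.
            out.write(extractor.extract(element));
        }

        @Override
        public void flush() throws IOException {
            out.flush();
        }

        @Override
        public void finish() throws IOException {
            // Finish the compression stream without closing the underlying
            // file stream; closing it is left to the framework.
            out.finish();
        }
    }

Taking a ready-made CompressionOutputStream instead of an FSDataOutputStream plus a CompressionCodec moves the codec wiring to the caller (typically the writer factory), so the writer stays a thin delegate and is not responsible for closing the underlying file stream.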