From e81328280cb2eee2bdfddadbe537249b525f7e73 Mon Sep 17 00:00:00 2001 From: Gabor Szadovszky Date: Mon, 4 Mar 2024 17:42:33 +0100 Subject: [PATCH] Add comments about the tested codecs --- .../java/org/apache/parquet/hadoop/TestParquetWriterError.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterError.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterError.java index f282724f75..89873b4972 100644 --- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterError.java +++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterError.java @@ -157,6 +157,7 @@ public void release(ByteBuffer b) { } public static void main(String[] args) throws Throwable { + // Codecs supported by the direct codec factory by default (without specific hadoop native libs) CompressionCodecName[] codecs = { CompressionCodecName.UNCOMPRESSED, CompressionCodecName.GZIP, @@ -173,6 +174,7 @@ public static void main(String[] args) throws Throwable { .withAllocator(allocator) .withCodecFactory(CodecFactory.createDirectCodecFactory( new Configuration(), allocator, ParquetProperties.DEFAULT_PAGE_SIZE)) + // Also validate the different direct codecs, which might have issues if an OOM happens .withCompressionCodec(codecs[RANDOM.nextInt(codecs.length)]) .build()) { for (int i = 0; i < 100_000; ++i) {