Fix read chunk compaction performer null pointer exception (apache#11960)
shuwenwei authored Jan 24, 2024
1 parent c52da2b commit 39b5893
Showing 2 changed files with 53 additions and 1 deletion.
@@ -136,7 +136,6 @@ private void collectValueColumnSchemaList() throws IOException {
              chunkHeader.getCompressionType());
      measurementSchemaMap.put(chunkMetadata.getMeasurementUid(), schema);
    }
-   break;
  }
}

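The one-line fix above removes an early break from the loop that collects value-column schemas. With the break in place, only the first chunk was ever inspected, so a column that is entirely null in that chunk never reached measurementSchemaMap, and the performer later dereferenced the missing schema. Below is a minimal standalone sketch of that failure mode; the chunk layout and names are illustrative, not the actual IoTDB types.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Standalone model of the bug, not IoTDB source: schemas are gathered per chunk,
// and a premature break after the first chunk drops columns that are null there.
public class SchemaCollectionSketch {
  public static void main(String[] args) {
    // Two chunks of device d0; null marks an absent (all-null) value column,
    // mirroring the test's first chunk, where s2 carries only nulls.
    List<List<String>> chunks =
        Arrays.asList(
            Arrays.asList("s0", "s1", null), // first chunk: s2 absent
            Arrays.asList("s0", "s1", "s2")); // second chunk: s2 present

    Map<String, String> measurementSchemaMap = new HashMap<>();
    for (List<String> chunk : chunks) {
      for (String measurement : chunk) {
        if (measurement != null) {
          measurementSchemaMap.put(measurement, "schema-of-" + measurement);
        }
      }
      // Before this commit an unconditional `break;` sat here, so only the first
      // chunk was scanned; measurementSchemaMap.get("s2") then returned null and
      // the compaction performer hit a NullPointerException when it used it.
    }
    System.out.println(measurementSchemaMap.containsKey("s2")); // true with the fix
  }
}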
@@ -277,6 +277,59 @@ public void testSimpleCompactionWithNotExistColumnByFlushChunk()
CompactionCheckerUtils.readFiles(Collections.singletonList(targetResource)));
}

+  @Test
+  public void testSimpleCompactionWithNullColumn()
+      throws IOException, StorageEngineException, InterruptedException, MetadataException,
+          PageException {
+    TsFileResource seqResource1 = createEmptyFileAndResource(true);
+    try (CompactionTestFileWriter writer = new CompactionTestFileWriter(seqResource1)) {
+      writer.startChunkGroup("d0");
+      writer.generateSimpleAlignedSeriesToCurrentDeviceWithNullValue(
+          Arrays.asList("s0", "s1", "s2"),
+          new TimeRange[] {new TimeRange(100000, 200000)},
+          TSEncoding.PLAIN,
+          CompressionType.LZ4,
+          Arrays.asList(false, false, true));
+      writer.generateSimpleAlignedSeriesToCurrentDeviceWithNullValue(
+          Arrays.asList("s0", "s1", "s2"),
+          new TimeRange[] {new TimeRange(300000, 500000)},
+          TSEncoding.PLAIN,
+          CompressionType.LZ4,
+          Arrays.asList(false, false, false));
+      writer.endChunkGroup();
+      writer.endFile();
+    }
+    seqResources.add(seqResource1);
+
+    TsFileResource seqResource2 =
+        generateSingleAlignedSeriesFile(
+            "d0",
+            Arrays.asList("s0", "s1"),
+            new TimeRange[] {new TimeRange(600000, 700000), new TimeRange(800000, 900000)},
+            TSEncoding.PLAIN,
+            CompressionType.LZ4,
+            Arrays.asList(false, false),
+            true);
+    seqResources.add(seqResource2);
+
+    tsFileManager.addAll(seqResources, true);
+    TsFileResource targetResource =
+        TsFileNameGenerator.getInnerCompactionTargetFileResource(seqResources, true);
+
+    ReadChunkCompactionPerformer performer = new ReadChunkCompactionPerformer();
+    CompactionTaskSummary summary = new CompactionTaskSummary();
+    performer.setSummary(summary);
+    performer.setSourceFiles(seqResources);
+    performer.setTargetFiles(Collections.singletonList(targetResource));
+    performer.perform();
+    CompactionUtils.moveTargetFile(
+        Collections.singletonList(targetResource), true, COMPACTION_TEST_SG);
+    TsFileResourceUtils.validateTsFileDataCorrectness(targetResource);
+    Assert.assertEquals(
+        CompactionCheckerUtils.readFiles(seqResources),
+        CompactionCheckerUtils.readFiles(Collections.singletonList(targetResource)));
+  }
+
@Test
public void testSimpleCompactionWithPartialDeletedColumnByFlushChunk()
throws IOException, StorageEngineException, InterruptedException, MetadataException,
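The added test reproduces the crash scenario: the first chunk of d0 writes data for s0 and s1 but, judging from the Arrays.asList(false, false, true) flags, marks s2 as all-null; the second chunk writes all three columns; and the second source file omits s2 entirely. Before this fix, the schema-collection loop stopped after the first chunk, so s2's schema could be missed and the performer hit the null pointer exception named in the commit title. With the fix, perform() completes, and the assertions verify that the compacted target file holds exactly the data of the source files.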
