Fix incorrect partitionValues_parsed with id & name column mapping in Delta Lake #24129
base: master
@@ -161,7 +161,7 @@ public void write(CheckpointEntries entries, TrinoOutputFile outputFile)
        }
        List<DeltaLakeColumnHandle> partitionColumns = extractPartitionColumns(entries.metadataEntry(), entries.protocolEntry(), typeManager);
        List<RowType.Field> partitionValuesParsedFieldTypes = partitionColumns.stream()
Review thread on this line:
- Reviewer: Let's add a corresponding test in …
- Author: Can you share scenarios you want to cover in the class? I intentionally avoided that. Both TestCheckpointWriter & TestCheckpointEntryIterator are not suitable to verify …
- Reviewer: I was thinking about a test similar to …
-                .map(column -> RowType.field(column.columnName(), column.type()))
+                .map(column -> RowType.field(column.basePhysicalColumnName(), column.type()))
Review comment on this line: Note the missing dashes.
                .collect(toImmutableList());
        for (AddFileEntry addFileEntry : entries.addFileEntries()) {
            writeAddFileEntry(pageBuilder, addEntryType, addFileEntry, entries.metadataEntry(), entries.protocolEntry(), partitionColumns, partitionValuesParsedFieldTypes, writeStatsAsJson, writeStatsAsStruct);
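The one-line change above is the whole fix: under id or name column mapping the physical (Parquet) column name differs from the logical name, and the test added below asserts that the partitionValues_parsed struct in the checkpoint is keyed by the physical name. The following is a minimal standalone sketch of the failure mode, assuming a reader that resolves partitionValues_parsed fields by physical column name; the class, record, and UUID are hypothetical stand-ins, not Trino code.

```java
import java.util.List;
import java.util.Optional;

public class PartitionValuesParsedNameDemo
{
    // Hypothetical stand-in for a partition column handle under id/name column mapping.
    record PartitionColumn(String logicalName, String physicalName) {}

    // Hypothetical reader-side lookup: find the partition field by its physical name.
    static Optional<String> findField(List<String> partitionValuesParsedFields, String physicalName)
    {
        return partitionValuesParsedFields.stream().filter(physicalName::equals).findFirst();
    }

    public static void main(String[] args)
    {
        PartitionColumn part = new PartitionColumn("part", "col-5f422f40-de70-45b2-88ab-1d5c90e94db1");

        List<String> beforeFix = List.of(part.logicalName());  // field named after the logical column
        List<String> afterFix = List.of(part.physicalName());  // field named after the physical column

        System.out.println(findField(beforeFix, part.physicalName())); // Optional.empty -> partition values not found
        System.out.println(findField(afterFix, part.physicalName()));  // Optional[col-...] -> partition values found
    }
}
```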
@@ -30,14 +30,20 @@
import io.trino.parquet.metadata.FileMetadata;
import io.trino.parquet.metadata.ParquetMetadata;
import io.trino.parquet.reader.MetadataReader;
import io.trino.parquet.reader.ParquetReader;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.DeletionVectorEntry;
import io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.ColumnMappingMode;
import io.trino.plugin.deltalake.transactionlog.DeltaLakeTransactionLogEntry;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
import io.trino.plugin.deltalake.transactionlog.ProtocolEntry;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics;
import io.trino.plugin.hive.FileFormatDataSourceStats;
import io.trino.plugin.hive.parquet.TrinoParquetDataSource;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.type.RowType;
import io.trino.spi.type.TimeZoneKey;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.MaterializedRow;
@@ -58,6 +64,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -71,13 +78,16 @@
import static com.google.common.collect.MoreCollectors.onlyElement;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.parquet.ParquetTestUtils.createParquetReader;
import static io.trino.plugin.deltalake.DeltaTestingConnectorSession.SESSION;
import static io.trino.plugin.deltalake.TestingDeltaLakeUtils.copyDirectoryContents;
import static io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.extractPartitionColumns;
import static io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.getColumnsMetadata;
import static io.trino.plugin.deltalake.transactionlog.checkpoint.TransactionLogTail.getEntriesFromJson;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_FILE_SYSTEM_STATS;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER;
import static java.lang.String.format;
import static java.time.ZoneOffset.UTC;
import static org.assertj.core.api.Assertions.assertThat;
@@ -270,6 +280,70 @@ private void testAddNestedColumnWithColumnMappingMode(String columnMappingMode)
                .containsPattern("(delta\\.columnMapping\\.physicalName.*?){11}");
    }

    @Test // regression test for https://github.com/trinodb/trino/issues/24121
    void testPartitionValuesParsedCheckpoint()
            throws Exception
    {
        testPartitionValuesParsedCheckpoint(ColumnMappingMode.ID);
        testPartitionValuesParsedCheckpoint(ColumnMappingMode.NAME);
        testPartitionValuesParsedCheckpoint(ColumnMappingMode.NONE);
    }

    private void testPartitionValuesParsedCheckpoint(ColumnMappingMode columnMappingMode)
Review comment on this line: Should we also have a product test in …?
            throws Exception
    {
        try (TestTable table = new TestTable(
                getQueryRunner()::execute,
                "test_checkpoint",
                "(x int, part int) WITH (checkpoint_interval = 3, column_mapping_mode = '" + columnMappingMode + "', partitioned_by = ARRAY['part'])")) {
Review comment on this line: Can you also add a test for other types (like Date), which have a different representation in …?
assertUpdate("INSERT INTO " + table.getName() + " VALUES (1, 10)", 1); | ||
assertUpdate("INSERT INTO " + table.getName() + " VALUES (2, 20)", 1); | ||
assertUpdate("INSERT INTO " + table.getName() + " VALUES (3, 30)", 1); | ||
|
||
Path tableLocation = Path.of(getTableLocation(table.getName()).replace("file://", "")); | ||
Path checkpoint = tableLocation.resolve("_delta_log/00000000000000000003.checkpoint.parquet"); | ||
|
||
MetadataEntry metadataEntry = loadMetadataEntry(0, tableLocation); | ||
ProtocolEntry protocolEntry = loadProtocolEntry(0, tableLocation); | ||
|
||
DeltaLakeColumnHandle partitionColumn = extractPartitionColumns(metadataEntry, protocolEntry, TESTING_TYPE_MANAGER).stream().collect(onlyElement()); | ||
String physicalColumnName = partitionColumn.basePhysicalColumnName(); | ||
if (columnMappingMode == ColumnMappingMode.ID || columnMappingMode == ColumnMappingMode.NAME) { | ||
assertThat(physicalColumnName).matches(PHYSICAL_COLUMN_NAME_PATTERN); | ||
} | ||
else { | ||
assertThat(physicalColumnName).isEqualTo("part"); | ||
} | ||
|
||
int partitionValuesParsedFieldPosition = 6; | ||
RowType addEntryType = new CheckpointSchemaManager(TESTING_TYPE_MANAGER).getAddEntryType(metadataEntry, protocolEntry, _ -> true, true, true, true); | ||
|
||
RowType.Field partitionValuesParsedField = addEntryType.getFields().get(partitionValuesParsedFieldPosition); | ||
assertThat(partitionValuesParsedField.getName().orElseThrow()).matches("partitionValues_parsed"); | ||
RowType partitionValuesParsedType = (RowType) partitionValuesParsedField.getType(); | ||
assertThat(partitionValuesParsedType.getFields().stream().collect(onlyElement()).getName().orElseThrow()).isEqualTo(physicalColumnName); | ||
|
||
TrinoParquetDataSource dataSource = new TrinoParquetDataSource(new LocalInputFile(checkpoint.toFile()), new ParquetReaderOptions(), new FileFormatDataSourceStats()); | ||
ParquetMetadata parquetMetadata = MetadataReader.readFooter(dataSource, Optional.empty()); | ||
try (ParquetReader reader = createParquetReader(dataSource, parquetMetadata, ImmutableList.of(addEntryType), List.of("add"))) { | ||
List<Integer> actual = new ArrayList<>(); | ||
Page page = reader.nextPage(); | ||
while (page != null) { | ||
Block block = page.getBlock(0); | ||
for (int i = 0; i < block.getPositionCount(); i++) { | ||
List<?> add = (List<?>) addEntryType.getObjectValue(SESSION, block, i); | ||
if (add == null) { | ||
continue; | ||
} | ||
actual.add((Integer) ((List<?>) add.get(partitionValuesParsedFieldPosition)).stream().collect(onlyElement())); | ||
} | ||
page = reader.nextPage(); | ||
} | ||
assertThat(actual).containsExactlyInAnyOrder(10, 20, 30); | ||
} | ||
} | ||
} | ||
|
||
/** | ||
* @see deltalake.column_mapping_mode_id | ||
* @see deltalake.column_mapping_mode_name | ||
|
@@ -2136,6 +2210,16 @@ private static MetadataEntry loadMetadataEntry(long entryNumber, Path tableLocat
        return transactionLog.getMetaData();
    }

    private static ProtocolEntry loadProtocolEntry(long entryNumber, Path tableLocation)
            throws IOException
    {
        TrinoFileSystem fileSystem = new HdfsFileSystemFactory(HDFS_ENVIRONMENT, HDFS_FILE_SYSTEM_STATS).create(SESSION);
        DeltaLakeTransactionLogEntry transactionLog = getEntriesFromJson(entryNumber, tableLocation.resolve("_delta_log").toString(), fileSystem).orElseThrow().stream()
                .filter(log -> log.getProtocol() != null)
                .collect(onlyElement());
        return transactionLog.getProtocol();
    }

    private String getTableLocation(String tableName)
    {
        Pattern locationPattern = Pattern.compile(".*location = '(.*?)'.*", Pattern.DOTALL);
Review comment: Add test in io.trino.plugin.deltalake.transactionlog.checkpoint.TestCheckpointEntryIterator
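On the earlier suggestion of a product test: the sketch below is a heavily hedged idea of what such coverage could look like, with Trino writing the checkpoint for a column-mapped, partitioned table and Spark/Databricks reading it back through the Delta Lake product-test harness, so an incorrectly named partitionValues_parsed field would surface as missing or wrong partition values. The class name, catalog/schema names, test-group wiring (omitted), and the assertion style are assumptions modeled on existing tests in io.trino.tests.product.deltalake, not part of this PR.

```java
package io.trino.tests.product.deltalake;

import org.testng.annotations.Test;

import java.util.List;

import static io.trino.tests.product.utils.QueryExecutors.onDelta;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static org.assertj.core.api.Assertions.assertThat;

public class TestDeltaLakePartitionValuesParsedCompatibility
{
    @Test
    public void testSparkReadsTrinoWrittenCheckpointWithNameColumnMapping()
    {
        String tableName = "test_partition_values_parsed_" + System.nanoTime();
        // Trino creates the table and, after the third commit, writes the checkpoint under test
        onTrino().executeQuery("CREATE TABLE delta.default." + tableName + " (x int, part int) " +
                "WITH (checkpoint_interval = 3, column_mapping_mode = 'name', partitioned_by = ARRAY['part'])");
        try {
            onTrino().executeQuery("INSERT INTO delta.default." + tableName + " VALUES (1, 10)");
            onTrino().executeQuery("INSERT INTO delta.default." + tableName + " VALUES (2, 20)");
            onTrino().executeQuery("INSERT INTO delta.default." + tableName + " VALUES (3, 30)");

            // Spark reads through the checkpoint; every partition value should still be visible
            assertThat(onDelta().executeQuery("SELECT part FROM default." + tableName).rows())
                    .containsExactlyInAnyOrder(List.of(10), List.of(20), List.of(30));
        }
        finally {
            onTrino().executeQuery("DROP TABLE delta.default." + tableName);
        }
    }
}
```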