-
Notifications
You must be signed in to change notification settings - Fork 3.2k
Spark: Fix NPE in RemoveOrphanFiles with prefix_listing for root table location (#16350) #16351
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -19,6 +19,7 @@ | |
| package org.apache.iceberg.util; | ||
|
|
||
| import static org.assertj.core.api.Assertions.assertThat; | ||
| import static org.assertj.core.api.Assertions.assertThatCode; | ||
|
|
||
| import java.io.IOException; | ||
| import java.nio.file.Files; | ||
|
|
@@ -33,6 +34,10 @@ | |
| import org.apache.iceberg.Schema; | ||
| import org.apache.iceberg.hadoop.HadoopFileIO; | ||
| import org.apache.iceberg.io.FileInfo; | ||
| import org.apache.iceberg.io.InputFile; | ||
| import org.apache.iceberg.io.OutputFile; | ||
| import org.apache.iceberg.io.SupportsPrefixOperations; | ||
| import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; | ||
| import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; | ||
| import org.apache.iceberg.relocated.com.google.common.collect.Lists; | ||
| import org.apache.iceberg.types.Types; | ||
|
|
@@ -242,4 +247,109 @@ private void listFilesRecursively( | |
|
|
||
| assertThat(remainingDirs).isEmpty(); | ||
| } | ||
|
|
||
| @Test | ||
| public void testListDirRecursivelyWithFileIOBucketRootBaseDir() { | ||
| List<FileInfo> entries = | ||
| ImmutableList.of( | ||
| new FileInfo("s3://bucket/data/a.parquet", 1L, 0L), | ||
| new FileInfo("s3://bucket/data/b.parquet", 1L, 0L), | ||
| new FileInfo("s3://bucket/top.parquet", 1L, 0L)); | ||
| SupportsPrefixOperations mockIO = new StaticPrefixFileIO(entries); | ||
|
|
||
| List<String> foundFiles = Lists.newArrayList(); | ||
| Predicate<FileInfo> fileFilter = fileInfo -> fileInfo.location().endsWith(".parquet"); | ||
|
|
||
| assertThatCode( | ||
| () -> | ||
| FileSystemWalker.listDirRecursivelyWithFileIO( | ||
| mockIO, "s3://bucket/", null, fileFilter, foundFiles::add)) | ||
| .doesNotThrowAnyException(); | ||
|
|
||
| assertThat(foundFiles) | ||
| .containsExactlyInAnyOrder( | ||
| "s3://bucket/data/a.parquet", "s3://bucket/data/b.parquet", "s3://bucket/top.parquet"); | ||
| } | ||
|
|
||
| /** | ||
| * Regression test: with a bucket-root base directory, files living under a hidden top-level | ||
| * directory (e.g. {@code _temporary}) must still be filtered out, and no NPE should occur. | ||
| */ | ||
| @Test | ||
| public void testListDirRecursivelyWithFileIOBucketRootFiltersHiddenDir() { | ||
| List<FileInfo> entries = | ||
| ImmutableList.of( | ||
| new FileInfo("s3://bucket/data/a.parquet", 1L, 0L), | ||
| new FileInfo("s3://bucket/_temporary/attempt_x/b.parquet", 1L, 0L), | ||
| new FileInfo("s3://bucket/.staging/c.parquet", 1L, 0L)); | ||
| SupportsPrefixOperations mockIO = new StaticPrefixFileIO(entries); | ||
|
|
||
| List<String> foundFiles = Lists.newArrayList(); | ||
| Predicate<FileInfo> fileFilter = fileInfo -> fileInfo.location().endsWith(".parquet"); | ||
|
|
||
| assertThatCode( | ||
| () -> | ||
| FileSystemWalker.listDirRecursivelyWithFileIO( | ||
| mockIO, "s3://bucket/", null, fileFilter, foundFiles::add)) | ||
| .doesNotThrowAnyException(); | ||
|
|
||
| assertThat(foundFiles).containsExactly("s3://bucket/data/a.parquet"); | ||
| } | ||
|
|
||
| /** | ||
| * Regression test: when the base directory is passed without a trailing slash (e.g. {@code | ||
| * oss://bucket}), the walk should still work correctly without NPE. | ||
| */ | ||
| @Test | ||
| public void testListDirRecursivelyWithFileIOBucketRootNoTrailingSlash() { | ||
| List<FileInfo> entries = | ||
| ImmutableList.of( | ||
| new FileInfo("s3://bucket/data/a.parquet", 1L, 0L), | ||
| new FileInfo("s3://bucket/_hidden/b.parquet", 1L, 0L)); | ||
| SupportsPrefixOperations mockIO = new StaticPrefixFileIO(entries); | ||
|
|
||
| List<String> foundFiles = Lists.newArrayList(); | ||
| Predicate<FileInfo> fileFilter = fileInfo -> fileInfo.location().endsWith(".parquet"); | ||
|
|
||
| assertThatCode( | ||
| () -> | ||
| FileSystemWalker.listDirRecursivelyWithFileIO( | ||
| mockIO, "s3://bucket", null, fileFilter, foundFiles::add)) | ||
| .doesNotThrowAnyException(); | ||
|
|
||
| assertThat(foundFiles).containsExactly("s3://bucket/data/a.parquet"); | ||
| } | ||
|
|
||
| private static class StaticPrefixFileIO implements SupportsPrefixOperations { | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Based on IntelliJ's inspection, this could be a Java record.
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I've seen this alert as well, but I don't think we should introduce a one-off Java record usage here.
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I agree with nssalian; I don't think a Java record is quite appropriate here. WDYT? |
||
// Canned listing handed back verbatim by listPrefix(); lets tests script the walk exactly.
private final Iterable<FileInfo> entries;

StaticPrefixFileIO(Iterable<FileInfo> fileInfos) {
  this.entries = fileInfos;
}
|
|
||
| @Override | ||
| public Iterable<FileInfo> listPrefix(String prefix) { | ||
| return entries; | ||
| } | ||
|
|
||
| @Override | ||
| public void deletePrefix(String prefix) { | ||
| throw new UnsupportedOperationException(); | ||
| } | ||
|
|
||
| @Override | ||
| public InputFile newInputFile(String path) { | ||
| throw new UnsupportedOperationException(); | ||
| } | ||
|
|
||
| @Override | ||
| public OutputFile newOutputFile(String path) { | ||
| throw new UnsupportedOperationException(); | ||
| } | ||
|
|
||
| @Override | ||
| public void deleteFile(String path) { | ||
| throw new UnsupportedOperationException(); | ||
| } | ||
| } | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Much of the code is duplicated. Can we refactor that out?
Maybe something like:
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Thanks for reviewing, addressed this comment in the latest commit. PTAL.