Skip to content

Commit d52209a

Browse files
committed
Fix other tests where buffer leaks surfaced.
This makes the tests pass but doesn't address the underlying issue: reads through the file:// scheme can return sliced subsets of buffers, because of how checksums are verified by the local filesystem.
1 parent 6e53e20 commit d52209a

3 files changed

Lines changed: 18 additions & 0 deletions

File tree

parquet-hadoop/src/test/java/org/apache/parquet/crypto/TestPropertiesDrivenEncryption.java

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import java.io.FileInputStream;
2929
import java.io.IOException;
3030
import java.io.InputStream;
31+
import java.io.UncheckedIOException;
3132
import java.nio.charset.StandardCharsets;
3233
import java.util.ArrayList;
3334
import java.util.Arrays;
@@ -41,6 +42,7 @@
4142
import java.util.concurrent.TimeUnit;
4243
import org.apache.hadoop.conf.Configuration;
4344
import org.apache.hadoop.fs.FileSystem;
45+
import org.apache.hadoop.fs.LocalFileSystem;
4446
import org.apache.hadoop.fs.Path;
4547
import org.apache.parquet.bytes.DirectByteBufferAllocator;
4648
import org.apache.parquet.bytes.HeapByteBufferAllocator;
@@ -529,6 +531,14 @@ private void writeEncryptedParquetFile(
529531
} catch (Exception e) {
530532
addErrorToErrorCollectorAndLog("Failed writing " + file.toString(), e, encryptionConfiguration, null);
531533
}
534+
// remove the CRC file so that Hadoop local filesystem doesn't slice buffers on
535+
// vector reads.
536+
try {
537+
final LocalFileSystem local = FileSystem.getLocal(new Configuration());
538+
local.delete(local.getChecksumFile(file), false);
539+
} catch (IOException e) {
540+
throw new UncheckedIOException(e);
541+
}
532542
}
533543

534544
private Path getFileName(Path root, EncryptionConfiguration encryptionConfiguration, int threadNumber) {

parquet-hadoop/src/test/java/org/apache/parquet/filter2/recordlevel/TestRecordLevelFilters.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,8 @@ public static List<User> makeUsers() {
130130
public static void setup() throws IOException {
131131
users = makeUsers();
132132
phonebookFile = PhoneBookWriter.writeToFile(users);
133+
// remove the CRC file
134+
new File(phonebookFile.getParentFile(), "." + phonebookFile.getName() + ".crc").delete();
133135
}
134136

135137
private static interface UserFilter {

parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestColumnIndexFiltering.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,8 @@
6464
import java.util.stream.Collectors;
6565
import java.util.stream.Stream;
6666
import org.apache.hadoop.conf.Configuration;
67+
import org.apache.hadoop.fs.FileSystem;
68+
import org.apache.hadoop.fs.LocalFileSystem;
6769
import org.apache.hadoop.fs.Path;
6870
import org.apache.parquet.bytes.HeapByteBufferAllocator;
6971
import org.apache.parquet.bytes.TrackingByteBufferAllocator;
@@ -363,6 +365,10 @@ private static void writePhoneBookToFile(
363365
.withWriterVersion(parquetVersion),
364366
DATA);
365367
}
368+
// remove the CRC file so that Hadoop local filesystem doesn't slice buffers on
369+
// vector reads.
370+
final LocalFileSystem local = FileSystem.getLocal(new Configuration());
371+
local.delete(local.getChecksumFile(file), false);
366372
}
367373

368374
private static FileEncryptionProperties getFileEncryptionProperties() {

0 commit comments

Comments (0)