Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
9892a13
Create VariantSchema
cashmand Mar 25, 2025
ff0f110
Fixing
cashmand May 6, 2025
e9c4839
Tests pass
cashmand May 6, 2025
f740a68
Fix and update tests for byte/short/int/long
cashmand May 6, 2025
c834b7b
Cleanup
cashmand May 6, 2025
e73a0d1
Cleanup
cashmand May 6, 2025
b242081
Code review feedback
cashmand May 6, 2025
d01a912
Refactor
cashmand May 7, 2025
eef2a55
Cleanup
cashmand May 7, 2025
19cc666
Formatting
cashmand May 7, 2025
09a6da4
Code review feedback
cashmand May 8, 2025
5922bee
spotless
cashmand May 8, 2025
dffa73c
Switch to rootBuilder
cashmand May 8, 2025
c67dcbe
Simple code review feedback
cashmand May 14, 2025
e6573d6
Refactor metadata.
cashmand May 14, 2025
b6ceb7d
Rename file
cashmand May 14, 2025
240e043
Remove builder holders
cashmand May 14, 2025
b2b5d1d
Refactor object and write pos.
cashmand May 14, 2025
710dc64
Create a BinaryConverter base class
cashmand May 14, 2025
7c56822
Cleanup
cashmand May 14, 2025
5ed5536
Fix
cashmand May 14, 2025
d10b34a
Switch value reader from pull to push
cashmand May 14, 2025
955fe9a
Code review feedback
cashmand May 14, 2025
e9f74a9
Variant: Refactor readers to use a parent handler.
rdblue May 16, 2025
c32ece0
Revert "Code review feedback"
cashmand May 20, 2025
5150b5b
Revert "Switch value reader from pull to push"
cashmand May 20, 2025
8fbde65
Merge branch 'ryan_shredding_changes' into variant_shredding_avro_reader
cashmand May 20, 2025
ebfe322
Remove rootBuilder
cashmand May 20, 2025
cc0b38f
spotless
cashmand May 20, 2025
cebcf37
Test out of order value and metadata
cashmand May 20, 2025
f6c2eec
Fix
cashmand May 20, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions parquet-avro/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,11 @@
<artifactId>parquet-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-variant</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.io.api.PrimitiveConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Type;

Expand Down Expand Up @@ -168,7 +169,11 @@ private static Converter newConverter(Schema schema, Type type, GenericData mode
case MAP:
return new MapConverter(parent, type.asGroupType(), schema, model);
case RECORD:
return new AvroIndexedRecordConverter<>(parent, type.asGroupType(), schema, model);
if (type.getLogicalTypeAnnotation() instanceof LogicalTypeAnnotation.VariantLogicalTypeAnnotation) {
return new AvroVariantConverter(parent, type.asGroupType(), schema, model);
} else {
return new AvroIndexedRecordConverter<>(parent, type.asGroupType(), schema, model);
}
case STRING:
return new AvroConverters.FieldStringConverter(parent);
case UNION:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@
import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Type;
import org.slf4j.Logger;
Expand Down Expand Up @@ -394,7 +395,11 @@ private static Converter newConverter(
}
return newStringConverter(schema, model, parent, validator);
case RECORD:
return new AvroRecordConverter(parent, type.asGroupType(), schema, model, validator);
if (type.getLogicalTypeAnnotation() instanceof LogicalTypeAnnotation.VariantLogicalTypeAnnotation) {
return new AvroVariantConverter(parent, type.asGroupType(), schema, model);
} else {
return new AvroRecordConverter(parent, type.asGroupType(), schema, model, validator);
}
case ENUM:
return new AvroConverters.FieldEnumConverter(parent, schema, model);
case ARRAY:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -466,6 +466,16 @@ public Optional<Schema> visit(
LogicalTypeAnnotation.EnumLogicalTypeAnnotation enumLogicalType) {
return of(Schema.create(Schema.Type.STRING));
}

@Override
public Optional<Schema> visit(
LogicalTypeAnnotation.VariantLogicalTypeAnnotation variantLogicalType) {
String name = parquetGroupType.getName();
Comment thread
cashmand marked this conversation as resolved.
List<Schema.Field> fields = new ArrayList<>();
fields.add(new Schema.Field("metadata", Schema.create(Schema.Type.BYTES)));
fields.add(new Schema.Field("value", Schema.create(Schema.Type.BYTES)));
return of(Schema.createRecord(name, null, namespace(name, names), false, fields));
}
})
.orElseThrow(
() -> new UnsupportedOperationException("Cannot convert Parquet type " + parquetType));
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.avro;

import java.nio.ByteBuffer;
import java.util.function.Consumer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.parquet.Preconditions;
import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.variant.ImmutableMetadata;
import org.apache.parquet.variant.VariantBuilder;
import org.apache.parquet.variant.VariantConverters;

/**
* Converter for Variant values.
*/
/**
 * Converter that materializes a Parquet group annotated with the Variant logical type
 * as an Avro record with two binary fields, {@code "metadata"} and {@code "value"}.
 *
 * <p>Field-by-field conversion is delegated to the converter produced by
 * {@link VariantConverters#newVariantConverter}, which pushes the metadata buffer
 * through {@link #setMetadata(ByteBuffer)} and appends the value through
 * {@link #build(Consumer)}.
 */
class AvroVariantConverter extends GroupConverter implements VariantConverters.ParentConverter<VariantBuilder> {
  private final ParentValueContainer parent;
  private final Schema avroSchema;
  private final GenericData model;
  private final int metadataPos;
  private final int valuePos;
  private final GroupConverter wrappedConverter;

  // Non-null only while a record is in flight; cleared in end().
  private VariantBuilder builder = null;
  // Cached across records: consecutive rows frequently share the same metadata buffer.
  private ImmutableMetadata metadata = null;

  AvroVariantConverter(ParentValueContainer parent, GroupType variantGroup, Schema avroSchema, GenericData model) {
    this.parent = parent;
    this.avroSchema = avroSchema;
    this.metadataPos = avroSchema.getField("metadata").pos();
    this.valuePos = avroSchema.getField("value").pos();
    this.model = model;
    this.wrappedConverter = VariantConverters.newVariantConverter(variantGroup, this::setMetadata, this);
  }

  @Override
  public void build(Consumer<VariantBuilder> consumer) {
    Preconditions.checkState(builder != null, "Cannot build variant: builder has not been initialized");
    consumer.accept(builder);
  }

  @Override
  public Converter getConverter(int fieldIndex) {
    return wrappedConverter.getConverter(fieldIndex);
  }

  @Override
  public void start() {
    wrappedConverter.start();
  }

  @Override
  public void end() {
    wrappedConverter.end();

    Preconditions.checkState(metadata != null, "Cannot build variant: missing metadata");
    // metadata is cached across records while builder is reset after each one, so a
    // record whose conversion never delivered metadata would otherwise NPE below.
    // Fail with an explicit message instead.
    Preconditions.checkState(builder != null, "Cannot build variant: builder has not been initialized");

    // A variant that had no value appended is encoded as a variant null.
    builder.appendNullIfEmpty();

    Object record = model.newRecord(null, avroSchema);
    model.setField(record, "metadata", metadataPos, metadata.getEncodedBuffer());
    model.setField(record, "value", valuePos, builder.encodedValue());
    parent.add(record);

    // Drop per-record state so nothing leaks into the next record.
    this.builder = null;
  }

  void setMetadata(ByteBuffer metadataBuffer) {
    // Reference (not content) comparison is intentional: it is a cheap check for
    // the common case where the reader hands back the same buffer instance, so the
    // parsed metadata dictionary can be reused without rebuilding it.
    if (metadata == null || metadata.getEncodedBuffer() != metadataBuffer) {
      this.metadata = new ImmutableMetadata(metadataBuffer);
    }

    this.builder = new VariantBuilder(metadata);
  }
}
Loading