You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hudi.apache.org by GitBox <gi...@apache.org> on 2022/09/26 03:54:43 UTC

[GitHub] [hudi] the-other-tim-brown commented on a diff in pull request #6761: [HUDI-4904] Add support for unraveling proto schemas in ProtoClassBasedSchemaProvider

the-other-tim-brown commented on code in PR #6761:
URL: https://github.com/apache/hudi/pull/6761#discussion_r979540417


##########
hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/helpers/TestProtoConversionUtil.java:
##########
@@ -35,24 +38,99 @@
 import com.google.protobuf.UInt64Value;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import com.google.protobuf.util.Timestamps;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Scanner;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
 public class TestProtoConversionUtil {
   @Test
-  public void allFieldsSet_wellKnownTypesAreNested() {
+  // Converts a fully-populated Sample proto with the "nested well-known types" Avro
+  // schema, round-trips the result through Avro binary serde, and checks it matches
+  // the expected GenericRecord built by the helper.
+  public void allFieldsSet_wellKnownTypesAreNested() throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+    // NOTE(review): the InputStream returned by getResourceAsStream is never closed —
+    // a minor resource leak; consider try-with-resources. Same pattern in the other tests.
+    Schema convertedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/sample_schema_nested.avsc"));
+    // Helper returns the proto input (left) paired with the expected Avro record (right).
+    Pair<Sample, GenericRecord> inputAndOutput = createInputOutputSampleWithWellKnownTypesNested(convertedSchema);
+    // Serde round-trip ensures the converted record is actually writable/readable with the schema.
+    GenericRecord actual = serializeAndDeserializeAvro(ProtoConversionUtil.convertToAvro(convertedSchema, inputAndOutput.getLeft()), convertedSchema);
+    Assertions.assertEquals(inputAndOutput.getRight(), actual);
+  }
+
+  @Test
+  // Converts an empty Sample proto (all fields left at proto defaults) with the nested
+  // schema and checks the result equals the helper-built default-valued record.
+  public void noFieldsSet_wellKnownTypesAreNested() throws IOException {
+    Sample sample = Sample.newBuilder().build();
+    Schema.Parser parser = new Schema.Parser();
+    // NOTE(review): InputStream from getResourceAsStream is not closed — minor leak in tests.
+    Schema convertedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/sample_schema_nested.avsc"));
+    // Round-trip through Avro binary serde to confirm the record is valid for the schema.
+    GenericRecord actual = serializeAndDeserializeAvro(ProtoConversionUtil.convertToAvro(convertedSchema, sample), convertedSchema);
+    Assertions.assertEquals(createDefaultOutputWithWellKnownTypesNested(convertedSchema), actual);
+  }
+
+  @Test
+  // Same as the nested-schema "all fields set" case, but using the flattened
+  // well-known-types schema variant (sample_schema_flattened.avsc).
+  public void allFieldsSet_wellKnownTypesAreFlattened() throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+    // NOTE(review): InputStream from getResourceAsStream is not closed — minor leak in tests.
+    Schema convertedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/sample_schema_flattened.avsc"));
+    // Helper returns proto input (left) paired with the expected Avro record (right).
+    Pair<Sample, GenericRecord> inputAndOutput = createInputOutputSampleWithWellKnownTypesFlattened(convertedSchema);
+    GenericRecord actual = serializeAndDeserializeAvro(ProtoConversionUtil.convertToAvro(convertedSchema, inputAndOutput.getLeft()), convertedSchema);
+    Assertions.assertEquals(inputAndOutput.getRight(), actual);
+  }
+
+  @Test
+  // Same as the nested-schema "no fields set" case, but against the flattened
+  // well-known-types schema variant.
+  public void noFieldsSet_wellKnownTypesAreFlattened() throws IOException {
+    Sample sample = Sample.newBuilder().build();
+    Schema.Parser parser = new Schema.Parser();
+    // NOTE(review): InputStream from getResourceAsStream is not closed — minor leak in tests.
+    Schema convertedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/sample_schema_flattened.avsc"));
+    GenericRecord actual = serializeAndDeserializeAvro(ProtoConversionUtil.convertToAvro(convertedSchema, sample), convertedSchema);
+    Assertions.assertEquals(createDefaultOutputWithWellKnownTypesFlattened(convertedSchema), actual);
+  }
+
+  @Test
+  // Converts a Parent proto whose schema is recursive but whose actual data stays
+  // within the unrolled depth, so conversion succeeds without hitting the overflow path.
+  public void recursiveSchema_noOverflow() throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+    // NOTE(review): InputStream from getResourceAsStream is not closed — minor leak in tests.
+    Schema convertedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/parent_schema_recursive.avsc"));
+    // Helper returns proto input (left) paired with the expected Avro record (right).
+    Pair<Parent, GenericRecord> inputAndOutput = createInputOutputForRecursiveSchemaNoOverflow(convertedSchema);
+    GenericRecord actual = serializeAndDeserializeAvro(ProtoConversionUtil.convertToAvro(convertedSchema, inputAndOutput.getLeft()), convertedSchema);
+    Assertions.assertEquals(inputAndOutput.getRight(), actual);
+  }
+
+  @Test
+  public void recursiveSchema_withOverflow() throws Exception {

Review Comment:
   Not in this test class. This class only tests converting a proto message into records of a given, already-created schema. The recursion depth only comes into play when the schema itself is created.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@hudi.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org