Posted to commits@avro.apache.org by dk...@apache.org on 2019/08/16 18:37:55 UTC

[avro] 02/04: Add some tests to validate the include namespace functionality

This is an automated email from the ASF dual-hosted git repository.

dkulp pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/avro.git

commit 1267c4c8ce898143d58b18469aa9ea11726fcdd4
Author: Juan Sebastián Urrego Escobar <ju...@gmail.com>
AuthorDate: Thu Aug 1 20:57:06 2019 +0200

    Add some tests to validate the include namespace functionality
---
 .../test/java/org/apache/avro/io/TestEncoders.java | 154 +++++++++++++++------
 1 file changed, 108 insertions(+), 46 deletions(-)
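The new tests exercise the JsonEncoder's includeNamespace switch: when it is true, union values are written with their branch-name wrapper (e.g. {"b":{"string":"myVal"}}), and when false the wrapper is dropped (e.g. {"b":"myVal"}). For context, a minimal sketch of driving the switch directly, assuming only the JsonEncoder.setIncludeNamespace(boolean) setter that the tests in the diff below call; the class and variable names here are illustrative, not part of the commit:

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.io.JsonEncoder;

    public class IncludeNamespaceSketch {
      public static void main(String[] args) throws Exception {
        // Same record schema the new tests use: an int field and a
        // ["null", "string"] union field.
        Schema schema = new Schema.Parser().parse(
            "{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
                + "{\"name\": \"a\", \"type\": \"int\"},"
                + "{\"name\": \"b\", \"type\": [\"null\", \"string\"]}]}");
        GenericData.Record datum = new GenericData.Record(schema);
        datum.put("a", 1);
        datum.put("b", "myVal");

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
        encoder.setIncludeNamespace(false); // drop the union branch wrapper
        new GenericDatumWriter<>(schema).write(datum, encoder);
        encoder.flush();

        // Prints {"a":1,"b":"myVal"}; with setIncludeNamespace(true) the
        // union value keeps its wrapper: {"a":1,"b":{"string":"myVal"}}
        System.out.println(out.toString("UTF-8"));
      }
    }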

diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
index dbe2d6a..9f0a585 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
@@ -17,25 +17,11 @@
  */
 package org.apache.avro.io;
 
-import static java.util.Arrays.asList;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
-
 import com.fasterxml.jackson.core.JsonEncoding;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.apache.avro.AvroTypeException;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Type;
@@ -46,6 +32,24 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+
+import static java.util.Arrays.asList;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
 public class TestEncoders {
   private static final int ENCODER_BUFFER_SIZE = 32;
   private static final int EXAMPLE_DATA_SIZE = 17;
@@ -126,6 +130,32 @@ public class TestEncoders {
   }
 
   @Test
+  public void testJsonEncoderWhenIncludeNamespaceOptionIsFalse() throws IOException {
+    String value = "{\"b\": {\"string\":\"myVal\"}, \"a\": 1}";
+    String schemaStr = "{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
+      + "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": [\"null\", \"string\"]}" + "]}";
+    Schema schema = new Schema.Parser().parse(schemaStr);
+    byte[] avroBytes = fromJsonToAvro(value, schema);
+    ObjectMapper mapper = new ObjectMapper();
+
+    assertEquals(mapper.readTree("{\"b\":\"myVal\",\"a\":1}"),
+      mapper.readTree(fromAvroToJson(avroBytes, schema, false)));
+  }
+
+  @Test
+  public void testJsonEncoderWhenIncludeNamespaceOptionIsTrue() throws IOException {
+    String value = "{\"b\": {\"string\":\"myVal\"}, \"a\": 1}";
+    String schemaStr = "{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
+      + "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": [\"null\", \"string\"]}" + "]}";
+    Schema schema = new Schema.Parser().parse(schemaStr);
+    byte[] avroBytes = fromJsonToAvro(value, schema);
+    ObjectMapper mapper = new ObjectMapper();
+
+    assertEquals(mapper.readTree("{\"b\":{\"string\":\"myVal\"},\"a\":1}"),
+      mapper.readTree(fromAvroToJson(avroBytes, schema, true)));
+  }
+
+  @Test
   public void testValidatingEncoderInit() throws IOException {
     Schema s = new Schema.Parser().parse("\"int\"");
     OutputStream out = new ByteArrayOutputStream();
@@ -137,7 +167,7 @@ public class TestEncoders {
   public void testJsonRecordOrdering() throws IOException {
     String value = "{\"b\": 2, \"a\": 1}";
     Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
-        + "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": \"int\"}" + "]}");
+      + "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": \"int\"}" + "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
     Object o = reader.read(null, decoder);
@@ -148,11 +178,11 @@ public class TestEncoders {
   public void testJsonExcessFields() throws IOException {
     String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a0\": 45, \"a2\":true, \"a1\": null}}";
     Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
-        + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
-        + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
-        + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+      + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+      + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+      + "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
     reader.read(null, decoder);
@@ -162,30 +192,30 @@ public class TestEncoders {
   public void testJsonRecordOrdering2() throws IOException {
     String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
     Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
-        + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
-        + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
-        + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+      + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+      + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+      + "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
     Object o = reader.read(null, decoder);
     Assert.assertEquals("{\"a\": {\"a1\": null, \"a2\": true}, \"b\": {\"b1\": \"h\", \"b2\": 3.14, \"b3\": 1.4}}",
-        o.toString());
+      o.toString());
   }
 
   @Test
   public void testJsonRecordOrderingWithProjection() throws IOException {
     String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
     Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
-        + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
-        + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
-        + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+      + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+      + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+      + "]}");
     Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<>(writerSchema, readerSchema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
     Object o = reader.read(null, decoder);
@@ -196,14 +226,14 @@ public class TestEncoders {
   public void testJsonRecordOrderingWithProjection2() throws IOException {
     String value = "{\"b\": { \"b1\": \"h\", \"b2\": [3.14, 3.56], \"b3\": 1.4}, \"a\": {\"a2\":true, \"a1\": null}}";
     Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
-        + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
-        + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":{\"type\":\"array\", \"items\":\"float\"}}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
-        + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+      + "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+      + "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":{\"type\":\"array\", \"items\":\"float\"}}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+      + "]}");
     Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
-        + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
-        + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
+      + "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+      + "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<>(writerSchema, readerSchema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
     Object o = reader.read(null, decoder);
@@ -222,7 +252,7 @@ public class TestEncoders {
     Path file = Paths.get(DIR.getRoot().getPath() + "testMappedByteBuffer.avro");
     Files.write(file, someBytes(EXAMPLE_DATA_SIZE));
     MappedByteBuffer buffer = FileChannel.open(file, StandardOpenOption.READ).map(FileChannel.MapMode.READ_ONLY, 0,
-        EXAMPLE_DATA_SIZE);
+      EXAMPLE_DATA_SIZE);
 
     testWithBuffer(buffer);
   }
@@ -240,9 +270,9 @@ public class TestEncoders {
 
     assertThat(output.toByteArray(), equalTo(avroEncoded(someBytes(EXAMPLE_DATA_SIZE))));
     assertThat(asList(buffer.position(), buffer.remaining()), is(asList(0, EXAMPLE_DATA_SIZE))); // fails if buffer is
-                                                                                                 // not array-backed and
-                                                                                                 // buffer overflow
-                                                                                                 // occurs
+    // not array-backed and
+    // buffer overflow
+    // occurs
   }
 
   private byte[] someBytes(int size) {
@@ -260,4 +290,36 @@ public class TestEncoders {
     System.arraycopy(bytes, 0, result, 1, bytes.length);
     return result;
   }
+
+  private byte[] fromJsonToAvro(String json, Schema schema) throws IOException {
+    DatumReader<Object> reader = new GenericDatumReader<>(schema);
+    GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
+    ByteArrayOutputStream output = new ByteArrayOutputStream();
+
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, json);
+    Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
+
+    Object datum = reader.read(null, decoder);
+
+    writer.write(datum, encoder);
+    encoder.flush();
+
+    return output.toByteArray();
+  }
+
+  private String fromAvroToJson(byte[] avroBytes, Schema schema, boolean includeNamespace) throws IOException {
+    GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
+    DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
+    ByteArrayOutputStream output = new ByteArrayOutputStream();
+
+    JsonEncoder encoder = factory.jsonEncoder(schema, output);
+    encoder.setIncludeNamespace(includeNamespace);
+    Decoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, null);
+    Object datum = reader.read(null, decoder);
+    writer.write(datum, encoder);
+    encoder.flush();
+    output.flush();
+
+    return new String(output.toByteArray(), StandardCharsets.UTF_8.name());
+  }
 }
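
For reference, the round trip the two new private helpers perform (helper names taken from the diff above): fromJsonToAvro decodes the JSON input against the schema and re-encodes it as Avro binary, so fromAvroToJson then exercises the JsonEncoder on a real decoded datum rather than on the original string. A short usage sketch, with the expected outputs taken from the two test assertions:

    byte[] avroBytes = fromJsonToAvro("{\"b\": {\"string\":\"myVal\"}, \"a\": 1}", schema);
    fromAvroToJson(avroBytes, schema, false); // {"b":"myVal","a":1}
    fromAvroToJson(avroBytes, schema, true);  // {"b":{"string":"myVal"},"a":1}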