You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hawq.apache.org by od...@apache.org on 2016/05/10 20:29:44 UTC
incubator-hawq git commit: HAWQ-703. Serialize HCatalog Complex Types
to plain text (as Hive profile).
Repository: incubator-hawq
Updated Branches:
refs/heads/master e60c805f4 -> 00f6074bd
HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile).
Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/00f6074b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/00f6074b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/00f6074b
Branch: refs/heads/master
Commit: 00f6074bd0e25ca6ce8224ffcca4ef7a571eee10
Parents: e60c805
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Tue May 10 13:28:54 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Tue May 10 13:28:54 2016 -0700
----------------------------------------------------------------------
.../java/org/apache/hawq/pxf/api/Metadata.java | 32 ++++--
.../hawq/pxf/api/utilities/EnumHawqType.java | 105 +++++++++++++++++
.../org/apache/hawq/pxf/api/MetadataTest.java | 60 ++++++++++
.../hive/utilities/EnumHiveToHawqType.java | 113 ++++++++++++++++++
.../plugins/hive/utilities/HiveUtilities.java | 114 +++++++------------
.../plugins/hive/HiveMetadataFetcherTest.java | 12 +-
.../hive/utilities/HiveUtilitiesTest.java | 56 +++++++--
.../hawq/pxf/service/MetadataResponse.java | 6 +-
.../pxf/service/MetadataResponseFormatter.java | 3 +-
.../service/MetadataResponseFormatterTest.java | 60 ++++++----
src/backend/catalog/external/externalmd.c | 57 +++++++++-
src/backend/utils/adt/pxf_functions.c | 9 +-
src/bin/psql/describe.c | 31 ++++-
src/include/catalog/external/itemmd.h | 3 +
src/include/catalog/pg_proc.h | 4 +-
src/include/catalog/pg_proc.sql | 2 +-
.../data/hcatalog/invalid_numeric_range.json | 2 +-
.../hcatalog/invalid_typemod_timestamp.json | 2 +-
src/test/regress/data/hcatalog/multi_table.json | 2 +-
.../data/hcatalog/multi_table_duplicates.json | 2 +-
.../regress/data/hcatalog/null_field_name.json | 1 +
.../data/hcatalog/null_field_source_type.json | 1 +
.../regress/data/hcatalog/null_field_type.json | 1 +
src/test/regress/data/hcatalog/null_fields.json | 1 +
src/test/regress/data/hcatalog/null_item.json | 1 +
.../regress/data/hcatalog/null_item_name.json | 1 +
.../regress/data/hcatalog/null_item_path.json | 1 +
.../regress/data/hcatalog/single_table.json | 2 +-
src/test/regress/input/json_load.source | 35 ++++++
src/test/regress/output/hcatalog_lookup.source | 8 +-
src/test/regress/output/json_load.source | 35 ++++++
31 files changed, 617 insertions(+), 145 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index 4fc510d..9e1c137 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -23,6 +23,7 @@ package org.apache.hawq.pxf.api;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
import org.apache.commons.lang.StringUtils;
/**
@@ -67,25 +68,32 @@ public class Metadata {
}
/**
- * Class representing item field - name and type.
+ * Class representing item field - name, type, source type, modifiers.
+ * Type - exposed type of field
+ * Source type - type of field in underlying source
+ * Modifiers - additional attributes which describe type or field
*/
public static class Field {
private String name;
- private String type; // TODO: change to enum
+ private EnumHawqType type; // field type which PXF exposes
+ private String sourceType; // field type PXF reads from
private String[] modifiers; // type modifiers, optional field
- public Field(String name, String type) {
-
- if (StringUtils.isBlank(name) || StringUtils.isBlank(type)) {
- throw new IllegalArgumentException("Field name and type cannot be empty");
+ public Field(String name, EnumHawqType type, String sourceType) {
+ if (StringUtils.isBlank(name)
+ || type == null
+ || StringUtils.isBlank(sourceType)) {
+ throw new IllegalArgumentException(
+ "Field name, type and source type cannot be empty");
}
-
this.name = name;
this.type = type;
+ this.sourceType = sourceType;
}
- public Field(String name, String type, String[] modifiers) {
- this(name, type);
+ public Field(String name, EnumHawqType type, String sourceType,
+ String[] modifiers) {
+ this(name, type, sourceType);
this.modifiers = modifiers;
}
@@ -93,10 +101,14 @@ public class Metadata {
return name;
}
- public String getType() {
+ public EnumHawqType getType() {
return type;
}
+ public String getSourceType() {
+ return sourceType;
+ }
+
public String[] getModifiers() {
return modifiers;
}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
new file mode 100644
index 0000000..b5a94c6
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.api.utilities;
+
+import java.io.IOException;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.map.JsonSerializer;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.codehaus.jackson.map.SerializerProvider;
+import org.codehaus.jackson.JsonProcessingException;
+
+class EnumHawqTypeSerializer extends JsonSerializer<EnumHawqType> {
+
+ @Override
+ public void serialize(EnumHawqType value, JsonGenerator generator,
+ SerializerProvider provider) throws IOException,
+ JsonProcessingException {
+ generator.writeString(value.getTypeName());
+ }
+ }
+
+/**
+ *
+ * HAWQ types which could be used in plugins.
+ *
+ */
+@JsonSerialize(using = EnumHawqTypeSerializer.class)
+public enum EnumHawqType {
+ Int2Type("int2"),
+ Int4Type("int4"),
+ Int8Type("int8"),
+ Float4Type("float4"),
+ Float8Type("float8"),
+ TextType("text"),
+ VarcharType("varchar", (byte) 1, true),
+ ByteaType("bytea"),
+ DateType("date"),
+ TimestampType("timestamp"),
+ BoolType("bool"),
+ NumericType("numeric", (byte) 2, true),
+ BpcharType("bpchar", (byte) 1, true);
+
+ private String typeName;
+ private byte modifiersNum;
+ private boolean validateIntegerModifiers;
+
+ EnumHawqType(String typeName) {
+ this.typeName = typeName;
+ }
+
+ EnumHawqType(String typeName, byte modifiersNum) {
+ this(typeName);
+ this.modifiersNum = modifiersNum;
+ }
+
+ EnumHawqType(String typeName, byte modifiersNum, boolean validateIntegerModifiers) {
+ this(typeName);
+ this.modifiersNum = modifiersNum;
+ this.validateIntegerModifiers = validateIntegerModifiers;
+ }
+
+ /**
+ *
+ * @return name of type
+ */
+ public String getTypeName() {
+ return this.typeName;
+ }
+
+ /**
+ *
+ * @return number of modifiers for type
+ */
+ public byte getModifiersNum() {
+ return this.modifiersNum;
+ }
+
+ /**
+ *
+ * @return whether modifiers should be integers
+ */
+ public boolean getValidateIntegerModifiers() {
+ return this.validateIntegerModifiers;
+ }
+}
+
+
+
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
new file mode 100644
index 0000000..327a15b
--- /dev/null
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.api;
+
+import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+
+public class MetadataTest {
+
+ @Test
+ public void createFieldEmptyNameType() {
+ try {
+ Metadata.Field field = new Metadata.Field(null, null, null, null);
+ fail("Empty name, type and source type shouldn't be allowed.");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Field name, type and source type cannot be empty", e.getMessage());
+ }
+ }
+
+ @Test
+ public void createFieldNullType() {
+ try {
+ Metadata.Field field = new Metadata.Field("col1", null, "string");
+ fail("Empty name, type and source type shouldn't be allowed.");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Field name, type and source type cannot be empty", e.getMessage());
+ }
+ }
+ @Test
+ public void createItemEmptyNameType() {
+ try {
+ Metadata.Item item = new Metadata.Item(null, null);
+ fail("Empty item name and path shouldn't be allowed.");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Item or path name cannot be empty", e.getMessage());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
new file mode 100644
index 0000000..a747bd5
--- /dev/null
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.plugins.hive.utilities;
+
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.api.UnsupportedTypeException;
+
+/**
+ *
+ * Hive types, which are supported by plugin, mapped to HAWQ's types
+ * @see EnumHawqType
+ */
+public enum EnumHiveToHawqType {
+
+ TinyintType("tinyint", EnumHawqType.Int2Type),
+ SmallintType("smallint", EnumHawqType.Int2Type),
+ IntType("int", EnumHawqType.Int4Type),
+ BigintType("bigint", EnumHawqType.Int8Type),
+ BooleanType("boolean", EnumHawqType.BoolType),
+ FloatType("float", EnumHawqType.Float4Type),
+ DoubleType("double", EnumHawqType.Float8Type),
+ StringType("string", EnumHawqType.TextType),
+ BinaryType("binary", EnumHawqType.ByteaType),
+ TimestampType("timestamp", EnumHawqType.TimestampType),
+ DateType("date", EnumHawqType.DateType),
+ DecimalType("decimal", EnumHawqType.NumericType, "[(,)]"),
+ VarcharType("varchar", EnumHawqType.VarcharType, "[(,)]"),
+ CharType("char", EnumHawqType.BpcharType, "[(,)]"),
+ ArrayType("array", EnumHawqType.TextType, "[<,>]"),
+ MapType("map", EnumHawqType.TextType, "[<,>]"),
+ StructType("struct", EnumHawqType.TextType, "[<,>]"),
+ UnionType("uniontype", EnumHawqType.TextType, "[<,>]");
+
+ private String typeName;
+ private EnumHawqType hawqType;
+ private String splitExpression;
+
+ EnumHiveToHawqType(String typeName, EnumHawqType hawqType) {
+ this.typeName = typeName;
+ this.hawqType = hawqType;
+ }
+
+ EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String splitExpression) {
+ this(typeName, hawqType);
+ this.splitExpression = splitExpression;
+ }
+
+ /**
+ *
+ * @return name of type
+ */
+ public String getTypeName() {
+ return this.typeName;
+ }
+
+ /**
+ *
+ * @return corresponding HAWQ type
+ */
+ public EnumHawqType getHawqType() {
+ return this.hawqType;
+ }
+
+ /**
+ *
+ * @return split by expression
+ */
+ public String getSplitExpression() {
+ return this.splitExpression;
+ }
+
+ /**
+ * Returns Hive to HAWQ type mapping entry for given Hive type
+ *
+ * @param hiveType full Hive type with modifiers, for example - decimal(10, 0), char(5), binary, array<string>, map<string,float> etc
+ * @return corresponding Hive to HAWQ type mapping entry
+ * @throws UnsupportedTypeException if there is no corresponding HAWQ type
+ */
+ public static EnumHiveToHawqType getHiveToHawqType(String hiveType) {
+ for (EnumHiveToHawqType t : values()) {
+ String hiveTypeName = hiveType;
+ String splitExpression = t.getSplitExpression();
+ if (splitExpression != null) {
+ String[] tokens = hiveType.split(splitExpression);
+ hiveTypeName = tokens[0];
+ }
+
+ if (t.getTypeName().toLowerCase().equals(hiveTypeName.toLowerCase())) {
+ return t;
+ }
+ }
+ throw new UnsupportedTypeException("Unable to map Hive's type: "
+ + hiveType + " to HAWQ's type");
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 7dfe410..096c0ff 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
*/
+import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
@@ -32,9 +33,10 @@ import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
-
import org.apache.hawq.pxf.api.Metadata;
import org.apache.hawq.pxf.api.UnsupportedTypeException;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
/**
* Class containing helper functions connecting
@@ -83,11 +85,10 @@ public class HiveUtilities {
}
/**
- * Checks if hive type is supported, and if so
- * return its matching HAWQ type.
- * Unsupported types will result in an exception.
- * <br>
- * The supported mappings are:<ul>
+ * Checks if hive type is supported, and if so return its matching HAWQ
+ * type. Unsupported types will result in an exception. <br>
+ * The supported mappings are:
+ * <ul>
* <li>{@code tinyint -> int2}</li>
* <li>{@code smallint -> int2}</li>
* <li>{@code int -> int4}</li>
@@ -102,80 +103,49 @@ public class HiveUtilities {
* <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
* <li>{@code varchar(size) -> varchar(size)}</li>
* <li>{@code char(size) -> bpchar(size)}</li>
+ * <li>{@code array<dataType> -> text}</li>
+ * <li>{@code map<keyDataType, valueDataType> -> text}</li>
+ * <li>{@code struct<field1:dataType,...,fieldN:dataType> -> text}</li>
+ * <li>{@code uniontype<...> -> text}</li>
* </ul>
*
- * @param hiveColumn hive column schema
+ * @param hiveColumn
+ * hive column schema
* @return field with mapped HAWQ type and modifiers
- * @throws UnsupportedTypeException if the column type is not supported
+ * @throws UnsupportedTypeException
+ * if the column type is not supported
+ * @see EnumHiveToHawqType
*/
public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
String fieldName = hiveColumn.getName();
- String hiveType = hiveColumn.getType();
- String mappedType;
- String[] modifiers = null;
+ String hiveType = hiveColumn.getType(); // Type name and modifiers if any
+ String hiveTypeName; // Type name
+ String[] modifiers = null; // Modifiers
+ EnumHiveToHawqType hiveToHawqType = EnumHiveToHawqType.getHiveToHawqType(hiveType);
+ EnumHawqType hawqType = hiveToHawqType.getHawqType();
- // check parameterized types:
- if (hiveType.startsWith("varchar(") ||
- hiveType.startsWith("char(")) {
- String[] toks = hiveType.split("[(,)]");
- if (toks.length != 2) {
- throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
- "expected type of the form <type name>(<parameter>)");
- }
- mappedType = toks[0];
- if (mappedType.equals("char")) {
- mappedType = "bpchar";
- }
- modifiers = new String[] {toks[1]};
- } else if (hiveType.startsWith("decimal(")) {
- String[] toks = hiveType.split("[(,)]");
- if (toks.length != 3) {
- throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
- "expected type of the form <type name>(<parameter>,<parameter>)");
+ if (hiveToHawqType.getSplitExpression() != null) {
+ String[] tokens = hiveType.split(hiveToHawqType.getSplitExpression());
+ hiveTypeName = tokens[0];
+ if (hawqType.getModifiersNum() > 0) {
+ modifiers = Arrays.copyOfRange(tokens, 1, tokens.length);
+ if (modifiers.length != hawqType.getModifiersNum()) {
+ throw new UnsupportedTypeException(
+ "HAWQ does not support type " + hiveType
+ + " (Field " + fieldName + "), "
+ + "expected number of modifiers: "
+ + hawqType.getModifiersNum()
+ + ", actual number of modifiers: "
+ + modifiers.length);
+ }
+ if (hawqType.getValidateIntegerModifiers() && !verifyIntegerModifiers(modifiers)) {
+ throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
+ }
}
- mappedType = "numeric";
- modifiers = new String[] {toks[1], toks[2]};
- } else {
+ } else
+ hiveTypeName = hiveType;
- switch (hiveType) {
- case "tinyint":
- case "smallint":
- mappedType = "int2";
- break;
- case "int":
- mappedType = "int4";
- break;
- case "bigint":
- mappedType = "int8";
- break;
- case "boolean":
- mappedType = "bool";
- break;
- case "timestamp":
- case "date":
- mappedType = hiveType;
- break;
- case "float":
- mappedType = "float4";
- break;
- case "double":
- mappedType = "float8";
- break;
- case "string":
- mappedType = "text";
- break;
- case "binary":
- mappedType = "bytea";
- break;
- default:
- throw new UnsupportedTypeException(
- "HAWQ does not support type " + hiveType + " (Field " + fieldName + ")");
- }
- }
- if (!verifyModifers(modifiers)) {
- throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
- }
- return new Metadata.Field(fieldName, mappedType, modifiers);
+ return new Metadata.Field(fieldName, hawqType, hiveTypeName, modifiers);
}
/**
@@ -186,7 +156,7 @@ public class HiveUtilities {
* @param modifiers type modifiers to be verified
* @return whether modifiers are null or integers
*/
- private static boolean verifyModifers(String[] modifiers) {
+ private static boolean verifyIntegerModifiers(String[] modifiers) {
if (modifiers == null) {
return true;
}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
index 1323eea..d9d97fc 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
@@ -149,10 +149,10 @@ public class HiveMetadataFetcherTest {
assertEquals(2, resultFields.size());
Metadata.Field field = resultFields.get(0);
assertEquals("field1", field.getName());
- assertEquals("text", field.getType()); // converted type
+ assertEquals("text", field.getType().getTypeName()); // converted type
field = resultFields.get(1);
assertEquals("field2", field.getName());
- assertEquals("int4", field.getType());
+ assertEquals("int4", field.getType().getTypeName());
}
@Test
@@ -204,10 +204,10 @@ public class HiveMetadataFetcherTest {
assertEquals(2, resultFields.size());
Metadata.Field field = resultFields.get(0);
assertEquals("field1", field.getName());
- assertEquals("text", field.getType()); // converted type
+ assertEquals("text", field.getType().getTypeName()); // converted type
field = resultFields.get(1);
assertEquals("field2", field.getName());
- assertEquals("int4", field.getType());
+ assertEquals("int4", field.getType().getTypeName());
}
}
@@ -258,10 +258,10 @@ public class HiveMetadataFetcherTest {
assertEquals(2, resultFields.size());
Metadata.Field field = resultFields.get(0);
assertEquals("field1", field.getName());
- assertEquals("text", field.getType()); // converted type
+ assertEquals("text", field.getType().getTypeName()); // converted type
field = resultFields.get(1);
assertEquals("field2", field.getName());
- assertEquals("int4", field.getType());
+ assertEquals("int4", field.getType().getTypeName());
}
private void prepareConstruction() throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 466dedb..e9b024a 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -24,9 +24,9 @@ import static org.junit.Assert.*;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.junit.Test;
-
import org.apache.hawq.pxf.api.Metadata;
import org.apache.hawq.pxf.api.UnsupportedTypeException;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
public class HiveUtilitiesTest {
@@ -54,17 +54,23 @@ public class HiveUtilitiesTest {
{"char(40)", "bpchar", "40"},
};
+ static String[][] complexTypes = {
+ {"ArraY<string>", "text"},
+ {"MaP<stRing, float>", "text"},
+ {"Struct<street:string, city:string, state:string, zip:int>", "text"},
+ {"UnionType<array<string>, string,int>", "text"}
+ };
+
@Test
public void mapHiveTypeUnsupported() throws Exception {
- hiveColumn = new FieldSchema("complex", "array", null);
+ hiveColumn = new FieldSchema("complex", "someTypeWeDontSupport", null);
try {
HiveUtilities.mapHiveType(hiveColumn);
fail("unsupported type");
} catch (UnsupportedTypeException e) {
- assertEquals("HAWQ does not support type " + hiveColumn.getType() + " (Field " + hiveColumn.getName() + ")",
- e.getMessage());
+ assertEquals("Unable to map Hive's type: " + hiveColumn.getType() + " to HAWQ's type", e.getMessage());
}
}
@@ -85,11 +91,11 @@ public class HiveUtilitiesTest {
*/
for (String[] line: typesMappings) {
String hiveType = line[0];
- String expectedType = line[1];
+ String hawqTypeName = line[1];
hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
assertEquals("field" + hiveType, result.getName());
- assertEquals(expectedType, result.getType());
+ assertEquals(hawqTypeName, result.getType().getTypeName());
assertNull(result.getModifiers());
}
}
@@ -109,7 +115,7 @@ public class HiveUtilitiesTest {
hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
assertEquals("field" + hiveType, result.getName());
- assertEquals(expectedType, result.getType());
+ assertEquals(expectedType, result.getType().getTypeName());
assertArrayEquals(expectedModifiers, result.getModifiers());
}
}
@@ -124,7 +130,7 @@ public class HiveUtilitiesTest {
fail("should fail with bad numeric type error");
} catch (UnsupportedTypeException e) {
String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badNumeric), " +
- "expected type of the form <type name>(<parameter>,<parameter>)";
+ "expected number of modifiers: 2, actual number of modifiers: 1";
assertEquals(errorMsg, e.getMessage());
}
@@ -135,7 +141,7 @@ public class HiveUtilitiesTest {
fail("should fail with bad char type error");
} catch (UnsupportedTypeException e) {
String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badChar), " +
- "expected type of the form <type name>(<parameter>)";
+ "expected number of modifiers: 1, actual number of modifiers: 3";
assertEquals(errorMsg, e.getMessage());
}
@@ -152,6 +158,38 @@ public class HiveUtilitiesTest {
}
@Test
+ public void mapHiveTypeInvalidModifiers() throws Exception {
+ String badHiveType = "decimal(abc, xyz)";
+ hiveColumn = new FieldSchema("numericColumn", badHiveType, null);
+ try {
+ HiveUtilities.mapHiveType(hiveColumn);
+ fail("should fail with bad modifiers error");
+ } catch (UnsupportedTypeException e) {
+ String errorMsg = "HAWQ does not support type " + badHiveType + " (Field numericColumn), modifiers should be integers";
+ assertEquals(errorMsg, e.getMessage());
+ }
+ }
+
+ @Test
+ public void mapHiveTypeComplex() throws Exception {
+ /*
+ * array<dataType> -> text
+ * map<keyDataType, valueDataType> -> text
+ * struct<fieldName1:dataType, ..., fieldNameN:dataType> -> text
+ * uniontype<...> -> text
+ */
+ for (String[] line: complexTypes) {
+ String hiveType = line[0];
+ String expectedType = line[1];
+ hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
+ Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
+ assertEquals("field" + hiveType, result.getName());
+ assertEquals(expectedType, result.getType().getTypeName());
+ assertNull(result.getModifiers());
+ }
+ }
+
+ @Test
public void parseTableQualifiedNameNoDbName() throws Exception {
String name = "orphan";
tblDesc = HiveUtilities.extractTableFromName(name);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
index ff73499..741e201 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
@@ -22,9 +22,9 @@ package org.apache.hawq.pxf.service;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+
import java.util.List;
-import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;
import org.apache.commons.logging.Log;
@@ -61,10 +61,10 @@ public class MetadataResponse implements StreamingOutput {
* Serializes the metadata list in JSON, To be used as the result string for HAWQ.
*/
@Override
- public void write(OutputStream output) throws IOException,
- WebApplicationException {
+ public void write(OutputStream output) throws IOException {
DataOutputStream dos = new DataOutputStream(output);
ObjectMapper mapper = new ObjectMapper();
+ mapper.configure(org.codehaus.jackson.map.SerializationConfig.Feature.USE_ANNOTATIONS, true); // enable annotations for serialization
mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
if(metadataList == null || metadataList.isEmpty()) {
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 55a6da5..8225ec5 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -85,7 +85,8 @@ public class MetadataResponseFormatter {
for (Metadata.Field field : metadata.getFields()) {
result.append("Field #").append(++i).append(": [")
.append("Name: ").append(field.getName())
- .append(", Type: ").append(field.getType()).append("] ");
+ .append(", Type: ").append(field.getType().getTypeName())
+ .append(", Source type: ").append(field.getSourceType()).append("] ");
}
}
LOG.debug(result);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index 0182835..21bf423 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -25,8 +25,9 @@ import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
-import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
import org.junit.Test;
public class MetadataResponseFormatterTest {
@@ -49,14 +50,14 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default", "table1");
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int"));
- fields.add(new Metadata.Field("field2", "text"));
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+ fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
metadataList.add(metadata);
response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
- .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+ .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
assertEquals(expected.toString(), convertResponseToString(response));
}
@@ -67,14 +68,14 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default", "table1");
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int", null));
- fields.add(new Metadata.Field("field2", "text", new String[] {}));
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint", null));
+ fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string", new String[] {}));
metadataList.add(metadata);
response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
- .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+ .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
assertEquals(expected.toString(), convertResponseToString(response));
}
@@ -85,10 +86,10 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default", "table1");
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int"));
- fields.add(new Metadata.Field("field2", "numeric",
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+ fields.add(new Metadata.Field("field2", EnumHawqType.NumericType, "decimal",
new String[] {"1349", "1789"}));
- fields.add(new Metadata.Field("field3", "char",
+ fields.add(new Metadata.Field("field3", EnumHawqType.BpcharType, "char",
new String[] {"50"}));
metadataList.add(metadata);
@@ -96,9 +97,28 @@ public class MetadataResponseFormatterTest {
StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
.append("\"fields\":[")
- .append("{\"name\":\"field1\",\"type\":\"int\"},")
- .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
- .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
+ .append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},")
+ .append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"]},")
+ .append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"]}")
+ .append("]}]}");
+
+ assertEquals(expected.toString(), convertResponseToString(response));
+ }
+
+ @Test
+ public void formatResponseStringWithSourceType() throws Exception {
+ List<Metadata> metadataList = new ArrayList<Metadata>();
+ List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+ Metadata.Item itemName = new Metadata.Item("default", "table1");
+ Metadata metadata = new Metadata(itemName, fields);
+ fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
+ metadataList.add(metadata);
+
+ response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
+ StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+ expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+ .append("\"fields\":[")
+ .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
.append("]}]}");
assertEquals(expected.toString(), convertResponseToString(response));
@@ -146,7 +166,7 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default", "table1");
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int"));
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
metadataList.add(null);
metadataList.add(metadata);
try {
@@ -165,8 +185,8 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default", "table"+i);
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int"));
- fields.add(new Metadata.Field("field2", "text"));
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+ fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
metdataList.add(metadata);
}
response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -179,7 +199,7 @@ public class MetadataResponseFormatterTest {
expected.append(",");
}
expected.append("{\"item\":{\"path\":\"default\",\"name\":\"table").append(i).append("\"},");
- expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+ expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
}
expected.append("]}");
@@ -193,8 +213,8 @@ public class MetadataResponseFormatterTest {
List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
Metadata.Item itemName = new Metadata.Item("default"+i, "table"+i);
Metadata metadata = new Metadata(itemName, fields);
- fields.add(new Metadata.Field("field1", "int"));
- fields.add(new Metadata.Field("field2", "text"));
+ fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+ fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
metdataList.add(metadata);
}
response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -206,7 +226,7 @@ public class MetadataResponseFormatterTest {
expected.append(",");
}
expected.append("{\"item\":{\"path\":\"default").append(i).append("\",\"name\":\"table").append(i).append("\"},");
- expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+ expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
}
expected.append("]}");
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/backend/catalog/external/externalmd.c
----------------------------------------------------------------------
diff --git a/src/backend/catalog/external/externalmd.c b/src/backend/catalog/external/externalmd.c
index e65d741..aca07c4 100644
--- a/src/backend/catalog/external/externalmd.c
+++ b/src/backend/catalog/external/externalmd.c
@@ -96,6 +96,11 @@ List *ParsePxfEntries(StringInfo json, char *profile, Oid dboid)
* ParsePxfItem
* Parse the given json object representing a single PXF item into the internal
* representation
+ * Reports an error and exits if any of the mandatory attributes in the given JSON are missing
+ * Input JSON schema:
+ *
+ * {"PXFMetadata":[{"item":{"path":"<ITEM_PATH>","name":"<ITEM_NAME>"},"fields":[{"name":"<FIELD_NAME>","type":"<FIELD_TYPE>","sourceType":"<SOURCE_TYPE>"[,"modifiers":["<MODIFIER1>","<MODIFIER2>"]]},...]}, ...]}
+ *
*/
static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
{
@@ -103,17 +108,37 @@ static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
/* parse item name */
struct json_object *jsonItem = json_object_object_get(pxfMD, "item");
- char *itemPath = pstrdup(json_object_get_string(json_object_object_get(jsonItem, "path")));
- char *itemName = pstrdup(json_object_get_string(json_object_object_get(jsonItem, "name")));
-
+ if (NULL == jsonItem)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"item\"")));
+
+ struct json_object *itemPath = json_object_object_get(jsonItem, "path");
+ if (NULL == itemPath)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"path\"")));
+
+ struct json_object *itemName = json_object_object_get(jsonItem, "name");
+ if (NULL == itemName)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"name\"")));
+
pxfItem->profile = profile;
- pxfItem->path = itemPath;
- pxfItem->name = itemName;
+ pxfItem->path = pstrdup(json_object_get_string(itemPath));
+ pxfItem->name = pstrdup(json_object_get_string(itemName));
elog(DEBUG1, "Parsed item %s, namespace %s", itemName, itemPath);
/* parse columns */
struct json_object *jsonFields = json_object_object_get(pxfMD, "fields");
+
+ if (NULL == jsonFields)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"fields\"")));
+
const int numFields = json_object_array_length(jsonFields);
for (int i = 0; i < numFields; i++)
{
@@ -121,10 +146,32 @@ static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
struct json_object *jsonCol = json_object_array_get_idx(jsonFields, i);
struct json_object *fieldName = json_object_object_get(jsonCol, "name");
+
+ if (NULL == fieldName)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"name\"")));
+
pxfField->name = pstrdup(json_object_get_string(fieldName));
struct json_object *fieldType = json_object_object_get(jsonCol, "type");
+
+ if (NULL == fieldType)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"type\"")));
+
pxfField->type = pstrdup(json_object_get_string(fieldType));
+
+ struct json_object *sourceFieldType = json_object_object_get(jsonCol, "sourceType");
+
+ if (NULL == sourceFieldType)
+ ereport(ERROR,
+ (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ errmsg("Could not parse PXF item, expected not null value for attribute \"sourceType\"")));
+
+ pxfField->sourceType = pstrdup(json_object_get_string(sourceFieldType));
+
pxfField->nTypeModifiers = 0;
elog(DEBUG1, "Parsing field %s, type %s", pxfField->name, pxfField->type);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/backend/utils/adt/pxf_functions.c
----------------------------------------------------------------------
diff --git a/src/backend/utils/adt/pxf_functions.c b/src/backend/utils/adt/pxf_functions.c
index ee19a8b..806565a 100644
--- a/src/backend/utils/adt/pxf_functions.c
+++ b/src/backend/utils/adt/pxf_functions.c
@@ -86,8 +86,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
FuncCallContext *funcctx;
HeapTuple tuple;
Datum result;
- Datum values[4];
- bool nulls[4];
+ Datum values[5];
+ bool nulls[5];
ItemContext *item_context;
@@ -126,7 +126,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
* build tupdesc for result tuples. This must match this function's
* pg_proc entry!
*/
- tupdesc = CreateTemplateTupleDesc(4, false);
+ tupdesc = CreateTemplateTupleDesc(5, false);
TupleDescInitEntry(tupdesc, (AttrNumber) 1, "path",
TEXTOID, -1, 0);
TupleDescInitEntry(tupdesc, (AttrNumber) 2, "itemname",
@@ -135,6 +135,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
TEXTOID, -1, 0);
TupleDescInitEntry(tupdesc, (AttrNumber) 4, "fieldtype",
TEXTOID, -1, 0);
+ TupleDescInitEntry(tupdesc, (AttrNumber) 5, "sourcefieldtype",
+ TEXTOID, -1, 0);
funcctx->tuple_desc = BlessTupleDesc(tupdesc);
MemoryContextSwitchTo(oldcontext);
@@ -169,6 +171,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
values[1] = CStringGetTextDatum(item->name);
values[2] = CStringGetTextDatum(field->name);
values[3] = CStringGetTextDatum(field->type);
+ values[4] = CStringGetTextDatum(field->sourceType);
tuple = heap_form_tuple(funcctx->tuple_desc, values, nulls);
result = HeapTupleGetDatum(tuple);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/bin/psql/describe.c
----------------------------------------------------------------------
diff --git a/src/bin/psql/describe.c b/src/bin/psql/describe.c
index 00cfe33..85012b2 100644
--- a/src/bin/psql/describe.c
+++ b/src/bin/psql/describe.c
@@ -4263,8 +4263,12 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
printQueryOpt myopt = pset.popt;
printTableContent cont;
int cols = 0;
+ if (verbose)
+ cols = 3;
+ else
+ cols = 2;
int total_numrows = 0;
- char *headers[2];
+ char *headers[cols];
bool printTableInitialized = false;
char *previous_path = NULL;
@@ -4274,11 +4278,15 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
char *itemname;
char *fieldname;
char *fieldtype;
+ char *sourcefieldtype;
int total_fields = 0; //needed to know how much memory to allocate for the current table
initPQExpBuffer(&buf);
- printfPQExpBuffer(&buf, "SELECT t.*, COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
+ printfPQExpBuffer(&buf, "SELECT t.path, t.itemname, t.fieldname, t.fieldtype,");
+ if (verbose)
+ appendPQExpBuffer(&buf, " sourcefieldtype, ");
+ appendPQExpBuffer(&buf,"COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
"pxf_get_item_fields('%s', '%s') t\n", profile, pattern);
res = PSQLexec(buf.data, false);
@@ -4294,7 +4302,9 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
/* Header */
headers[0] = gettext_noop("Column");
headers[1] = gettext_noop("Type");
- cols = 2;
+ if (verbose)
+ headers[2] = gettext_noop("Source type");
+
for (int i = 0; i < total_numrows; i++)
{
@@ -4303,7 +4313,14 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
itemname = PQgetvalue(res, i, 1);
fieldname = PQgetvalue(res, i, 2);
fieldtype = PQgetvalue(res, i, 3);
- total_fields = PQgetvalue(res, i, 4);
+ if (verbose)
+ {
+ sourcefieldtype = PQgetvalue(res, i, 4);
+ total_fields = PQgetvalue(res, i, 5);
+ } else
+ {
+ total_fields = PQgetvalue(res, i, 4);
+ }
/* First row for current table */
if (previous_itemname == NULL
@@ -4342,6 +4359,12 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
/* Type */
printTableAddCell(&cont, fieldtype, false, false);
+ if (verbose)
+ {
+ /* Source type */
+ printTableAddCell(&cont, sourcefieldtype, false, false);
+ }
+
previous_path = path;
previous_itemname = itemname;
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/include/catalog/external/itemmd.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/external/itemmd.h b/src/include/catalog/external/itemmd.h
index a841d63..e6dad63 100644
--- a/src/include/catalog/external/itemmd.h
+++ b/src/include/catalog/external/itemmd.h
@@ -41,6 +41,9 @@ typedef struct PxfField
/* type name */
char *type;
+ /* source type name */
+ char *sourceType;
+
/* type modifiers, e.g. max length or precision */
int typeModifiers[2];
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/include/catalog/pg_proc.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index f3c5e77..e818909 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -10129,8 +10129,8 @@ DESCR("bitmap(internal)");
DATA(insert OID = 3011 ( bmoptions PGNSP PGUID 12 f f t f s 2 17 f "1009 16" _null_ _null_ _null_ bmoptions - _null_ n ));
DESCR("btree(internal)");
-/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
-DATA(insert OID = 9996 ( pxf_get_item_fields PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25}" "{i,i,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype}" pxf_get_item_fields - _null_ r ));
+/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
+DATA(insert OID = 9996 ( pxf_get_item_fields PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25,25}" "{i,i,o,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype,sourcefieldtype}" pxf_get_item_fields - _null_ r ));
DESCR("Returns the metadata fields of external object from PXF");
/* raises deprecation error */
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/include/catalog/pg_proc.sql
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.sql b/src/include/catalog/pg_proc.sql
index fc475e2..987b802 100644
--- a/src/include/catalog/pg_proc.sql
+++ b/src/include/catalog/pg_proc.sql
@@ -5348,7 +5348,7 @@
CREATE FUNCTION bmoptions(_text, bool) RETURNS bytea LANGUAGE internal STABLE STRICT AS 'bmoptions' WITH (OID=3011, DESCRIPTION="btree(internal)");
- CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
+ CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text, OUT sourcefieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
-- raises deprecation error
CREATE FUNCTION gp_deprecated() RETURNS void LANGUAGE internal IMMUTABLE AS 'gp_deprecated' WITH (OID=9997, DESCRIPTION="raises function deprecation error");
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/invalid_numeric_range.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/invalid_numeric_range.json b/src/test/regress/data/hcatalog/invalid_numeric_range.json
index 1888777..41149eb 100644
--- a/src/test/regress/data/hcatalog/invalid_numeric_range.json
+++ b/src/test/regress/data/hcatalog/invalid_numeric_range.json
@@ -1 +1 @@
-{"PXFMetadata":[{"item":{"path":"default","name":"mytable2"},"fields":[{"name":"n5","type":"numeric","modifiers":["30","40"]}]}]}
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable2"},"fields":[{"name":"n5","type":"numeric","modifiers":["30","40"],"sourceType":"decimal"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json b/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
index 8e3c570..66e9041 100644
--- a/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
+++ b/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
@@ -1 +1 @@
-{"PXFMetadata":[{"item":{"path":"default","name":"mytable3"},"fields":[{"name":"n5","type":"timestamp","modifiers":["30","40"]}]}]}
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable3"},"fields":[{"name":"n5","type":"timestamp","modifiers":["30","40"],"sourceType":"decimal"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/multi_table.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/multi_table.json b/src/test/regress/data/hcatalog/multi_table.json
index 5d5c0ff..82e70b0 100644
--- a/src/test/regress/data/hcatalog/multi_table.json
+++ b/src/test/regress/data/hcatalog/multi_table.json
@@ -1 +1 @@
-{"PXFMetadata":[{"item":{"path":"db1","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db2","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db2","name":"ht2"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]}]}
+{"PXFMetadata":[{"item":{"path":"db1","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db2","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db2","name":"ht2"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/multi_table_duplicates.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/multi_table_duplicates.json b/src/test/regress/data/hcatalog/multi_table_duplicates.json
index 45db598..3c3acc0 100644
--- a/src/test/regress/data/hcatalog/multi_table_duplicates.json
+++ b/src/test/regress/data/hcatalog/multi_table_duplicates.json
@@ -1 +1 @@
-{"PXFMetadata":[{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]}]}
+{"PXFMetadata":[{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType": "varchar"}]}]}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_field_name.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_field_name.json b/src/test/regress/data/hcatalog/null_field_name.json
new file mode 100644
index 0000000..e627865
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_name.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"}, {"type":"bytea","sourceType":"binary"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_field_source_type.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_field_source_type.json b/src/test/regress/data/hcatalog/null_field_source_type.json
new file mode 100644
index 0000000..6d17592
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_source_type.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"}, {"name":"col2", "type":"bytea"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_field_type.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_field_type.json b/src/test/regress/data/hcatalog/null_field_type.json
new file mode 100644
index 0000000..9dffd56
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_type.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"}, {"name":"col2","sourceType":"binary"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_fields.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_fields.json b/src/test/regress/data/hcatalog/null_fields.json
new file mode 100644
index 0000000..a20a447
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_fields.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"name": "mytable", "path" : "default"}}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_item.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_item.json b/src/test/regress/data/hcatalog/null_item.json
new file mode 100644
index 0000000..b613cd8
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_item_name.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_item_name.json b/src/test/regress/data/hcatalog/null_item_name.json
new file mode 100644
index 0000000..0c580c9
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item_name.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"path":"default"},"fields":[{"name":"s1","type":"text","sourceType":"string"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/null_item_path.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/null_item_path.json b/src/test/regress/data/hcatalog/null_item_path.json
new file mode 100644
index 0000000..fdba035
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item_path.json
@@ -0,0 +1 @@
+{"PXFMetadata":[{"item":{"name": "mytable"},"fields":[{"name":"s1","type":"text","sourceType":"string"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/data/hcatalog/single_table.json
----------------------------------------------------------------------
diff --git a/src/test/regress/data/hcatalog/single_table.json b/src/test/regress/data/hcatalog/single_table.json
index 7df3427..b571e5d 100644
--- a/src/test/regress/data/hcatalog/single_table.json
+++ b/src/test/regress/data/hcatalog/single_table.json
@@ -1 +1 @@
-{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"s1","type":"text"},{"name":"s2","type":"text"},{"name":"n1","type":"int4"},{"name":"d1","type":"float8"},{"name":"dc1","type":"numeric","modifiers":["38","18"]},{"name":"tm","type":"timestamp"},{"name":"f","type":"float4"},{"name":"bg","type":"int8"},{"name":"b","type":"bool"},{"name":"tn","type":"int2"},{"name":"sml","type":"int2"},{"name":"dt","type":"date"},{"name":"vc1","type":"varchar","modifiers":["5"]},{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"bin","type":"bytea"}]}]}
+{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"s1","type":"text","sourceType":"string"},{"name":"s2","type":"text","sourceType":"string"},{"name":"n1","type":"int4","sourceType":"int"},{"name":"d1","type":"float8","sourceType":"double"},{"name":"dc1","type":"numeric","modifiers":["38","18"],"sourceType":"decimal"},{"name":"tm","type":"timestamp","sourceType":"timestamp"},{"name":"f","type":"float4","sourceType":"float"},{"name":"bg","type":"int8","sourceType":"bigint"},{"name":"b","type":"bool","sourceType":"boolean"},{"name":"tn","type":"int2","sourceType":"tinyint"},{"name":"sml","type":"int2","sourceType":"tinyint"},{"name":"dt","type":"date","sourceType":"date"},{"name":"vc1","type":"varchar","modifiers":["5"],"sourceType":"varchar"},{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"bin","type":"bytea","sourceType":"binary"}]}]}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/input/json_load.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/json_load.source b/src/test/regress/input/json_load.source
index d0430d2..6dcef8a 100644
--- a/src/test/regress/input/json_load.source
+++ b/src/test/regress/input/json_load.source
@@ -91,6 +91,41 @@ BEGIN TRANSACTION;
SELECT load_json_data('@abs_builddir@/data/hcatalog/invalid_typemod_timestamp.json');
END TRANSACTION;
+-- negative test case: null "item" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item.json');
+END TRANSACTION;
+
+-- negative test case: null "name" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_name.json');
+END TRANSACTION;
+
+-- negative test case: null "path" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_path.json');
+END TRANSACTION;
+
+-- negative test case: null "fields" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_fields.json');
+END TRANSACTION;
+
+-- negative test case: null "name" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_name.json');
+END TRANSACTION;
+
+-- negative test case: null "type" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_type.json');
+END TRANSACTION;
+
+-- negative test case: null "sourceType" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_source_type.json');
+END TRANSACTION;
+
-- cleanup
drop table mytable_internal;
DROP FUNCTION load_json_data(filename text);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/output/hcatalog_lookup.source
----------------------------------------------------------------------
diff --git a/src/test/regress/output/hcatalog_lookup.source b/src/test/regress/output/hcatalog_lookup.source
index ac2a4f6..a79ac93 100644
--- a/src/test/regress/output/hcatalog_lookup.source
+++ b/src/test/regress/output/hcatalog_lookup.source
@@ -2,14 +2,14 @@
-- test hcatalog lookup
-- --------------------------------------
SELECT * FROM pxf_get_item_fields('Hive', '*');
- path | itemname | fieldname | fieldtype
-------+----------+-----------+-----------
+ path | itemname | fieldname | fieldtype | sourcefieldtype
+------+----------+-----------+-----------+-----------------
(0 rows)
\d hcatalog.*.*
SELECT * FROM pxf_get_item_fields('Hive', '*abc*abc*');
- path | itemname | fieldname | fieldtype
-------+----------+-----------+-----------
+ path | itemname | fieldname | fieldtype | sourcefieldtype
+------+----------+-----------+-----------+-----------------
(0 rows)
\d hcatalog.*abc*.*abc*
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00f6074b/src/test/regress/output/json_load.source
----------------------------------------------------------------------
diff --git a/src/test/regress/output/json_load.source b/src/test/regress/output/json_load.source
index 3e7bea2..5bec43e 100644
--- a/src/test/regress/output/json_load.source
+++ b/src/test/regress/output/json_load.source
@@ -196,6 +196,41 @@ BEGIN TRANSACTION;
SELECT load_json_data('@abs_builddir@/data/hcatalog/invalid_typemod_timestamp.json');
ERROR: Invalid typemod for imported column n5
END TRANSACTION;
+-- negative test case: null "item" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "item"
+END TRANSACTION;
+-- negative test case: null "name" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_name.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "name"
+END TRANSACTION;
+-- negative test case: null "path" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_path.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "path"
+END TRANSACTION;
+-- negative test case: null "fields" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_fields.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "fields"
+END TRANSACTION;
+-- negative test case: null "name" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_name.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "name"
+END TRANSACTION;
+-- negative test case: null "type" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_type.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "type"
+END TRANSACTION;
+-- negative test case: null "sourceType" attribute
+BEGIN TRANSACTION;
+SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_source_type.json');
+ERROR: Could not parse PXF item, expected not null value for attribute "sourceType"
+END TRANSACTION;
-- cleanup
drop table mytable_internal;
DROP FUNCTION load_json_data(filename text);