You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by br...@apache.org on 2014/10/17 19:40:06 UTC
svn commit: r1632639 - in /hive/trunk:
contrib/src/java/org/apache/hadoop/hive/contrib/serde2/
hbase-handler/src/java/org/apache/hadoop/hive/hbase/
itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/
itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/ ...
Author: brock
Date: Fri Oct 17 17:40:05 2014
New Revision: 1632639
URL: http://svn.apache.org/r1632639
Log:
HIVE-8450 - Create table like does not copy over table properties (Navis via Brock)
Added:
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeSpec.java
Modified:
hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java
hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
hive/trunk/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
hive/trunk/ql/src/test/queries/clientpositive/create_like.q
hive/trunk/ql/src/test/results/clientpositive/create_like.q.out
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java Fri Oct 17 17:40:05 2014
@@ -55,6 +55,14 @@ import org.apache.hadoop.io.Writable;
* Currently field.delim can be multiple character while collection.delim
* and mapkey.delim should be just single character.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+ serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+ serdeConstants.ESCAPE_CHAR,
+ serdeConstants.SERIALIZATION_ENCODING,
+ LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
public class MultiDelimitSerDe extends AbstractSerDe {
private static final Log LOG = LogFactory.getLog(MultiDelimitSerDe.class.getName());
private static final byte[] DEFAULT_SEPARATORS = {(byte) 1, (byte) 2, (byte) 3};
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java Fri Oct 17 17:40:05 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -70,10 +71,18 @@ import org.apache.hadoop.io.Writable;
* writableStringObjectInspector. We should switch to that when we have a UTF-8
* based Regex library.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ RegexSerDe.INPUT_REGEX, RegexSerDe.OUTPUT_FORMAT_STRING,
+ RegexSerDe.INPUT_REGEX_CASE_SENSITIVE })
public class RegexSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName());
+ public static final String INPUT_REGEX = "input.regex";
+ public static final String OUTPUT_FORMAT_STRING = "output.format.string";
+ public static final String INPUT_REGEX_CASE_SENSITIVE = "input.regex.case.insensitive";
+
int numColumns;
String inputRegex;
String outputFormatString;
@@ -90,12 +99,12 @@ public class RegexSerDe extends Abstract
// We can get the table definition from tbl.
// Read the configuration parameters
- inputRegex = tbl.getProperty("input.regex");
- outputFormatString = tbl.getProperty("output.format.string");
+ inputRegex = tbl.getProperty(INPUT_REGEX);
+ outputFormatString = tbl.getProperty(OUTPUT_FORMAT_STRING);
String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(tbl
- .getProperty("input.regex.case.insensitive"));
+ .getProperty(INPUT_REGEX_CASE_SENSITIVE));
// Parse the configuration parameters
if (inputRegex != null) {
@@ -258,6 +267,7 @@ public class RegexSerDe extends Abstract
return outputRowText;
}
+ @Override
public SerDeStats getSerDeStats() {
// no support for statistics
return null;
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java Fri Oct 17 17:40:05 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.io.NonS
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -73,6 +74,7 @@ import org.apache.hadoop.io.Writable;
* this, which is apparently 25% faster than the python version is available at
* http://github.com/klbostee/ctypedbytes/tree/master
*/
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class TypedBytesSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(TypedBytesSerDe.class
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Fri Oct 17 17:40:05 2014
@@ -28,9 +28,11 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
@@ -42,6 +44,27 @@ import org.apache.hadoop.mapred.JobConf;
* HBaseSerDe can be used to serialize object into an HBase table and
* deserialize objects from an HBase table.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+ serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+ serdeConstants.ESCAPE_CHAR,
+ serdeConstants.SERIALIZATION_ENCODING,
+ LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS,
+ HBaseSerDe.HBASE_COLUMNS_MAPPING,
+ HBaseSerDe.HBASE_TABLE_NAME,
+ HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,
+ HBaseSerDe.HBASE_KEY_COL,
+ HBaseSerDe.HBASE_PUT_TIMESTAMP,
+ HBaseSerDe.HBASE_COMPOSITE_KEY_CLASS,
+ HBaseSerDe.HBASE_COMPOSITE_KEY_TYPES,
+ HBaseSerDe.HBASE_COMPOSITE_KEY_FACTORY,
+ HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS,
+ HBaseSerDe.HBASE_SCAN_CACHE,
+ HBaseSerDe.HBASE_SCAN_CACHEBLOCKS,
+ HBaseSerDe.HBASE_SCAN_BATCH,
+ HBaseSerDe.HBASE_AUTOGENERATE_STRUCT})
public class HBaseSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(HBaseSerDe.class);
Modified: hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java (original)
+++ hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java Fri Oct 17 17:40:05 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class CustomSerDe1 extends AbstractSerDe {
int numColumns;
Modified: hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java (original)
+++ hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java Fri Oct 17 17:40:05 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class CustomSerDe2 extends AbstractSerDe {
int numColumns;
Modified: hive/trunk/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java (original)
+++ hive/trunk/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java Fri Oct 17 17:40:05 2014
@@ -27,6 +27,7 @@ import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -44,10 +45,17 @@ import com.google.common.collect.Lists;
* TestSerDe.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ TestSerDe.COLUMNS, TestSerDe.COLUMNS_COMMENTS, TestSerDe.DEFAULT_SERIALIZATION_FORMAT})
public class TestSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName());
+ public static final String COLUMNS = "columns";
+ public static final String COLUMNS_COMMENTS = "columns.comments";
+ public static final String DEFAULT_SERIALIZATION_FORMAT = "testserde.default.serialization.format";
+
public String getShortName() {
return shortName();
}
@@ -76,7 +84,7 @@ public class TestSerDe extends AbstractS
@Override
public void initialize(Configuration job, Properties tbl) throws SerDeException {
separator = DefaultSeparator;
- String altSep = tbl.getProperty("testserde.default.serialization.format");
+ String altSep = tbl.getProperty(DEFAULT_SERIALIZATION_FORMAT);
if (altSep != null && altSep.length() > 0) {
try {
byte[] b = new byte[1];
@@ -87,7 +95,7 @@ public class TestSerDe extends AbstractS
}
}
- String columnProperty = tbl.getProperty("columns");
+ String columnProperty = tbl.getProperty(COLUMNS);
if (columnProperty == null || columnProperty.length() == 0) {
// Hack for tables with no columns
// Treat it as a table with a single column called "col"
@@ -97,7 +105,7 @@ public class TestSerDe extends AbstractS
} else {
columnNames = Arrays.asList(columnProperty.split(","));
cachedObjectInspector = MetadataListStructObjectInspector
- .getInstance(columnNames,Lists.newArrayList(Splitter.on('\0').split(tbl.getProperty("columns.comments"))));
+ .getInstance(columnNames,Lists.newArrayList(Splitter.on('\0').split(tbl.getProperty(COLUMNS_COMMENTS))));
}
LOG.info(getClass().getName() + ": initialized with columnNames: "
+ columnNames);
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Fri Oct 17 17:40:05 2014
@@ -382,6 +382,12 @@ public class MetaStoreUtils {
}
}
+ public static Class<? extends Deserializer> getDeserializerClass(
+ Configuration conf, org.apache.hadoop.hive.metastore.api.Table table) throws Exception {
+ String lib = table.getSd().getSerdeInfo().getSerializationLib();
+ return lib == null ? null : conf.getClassByName(lib).asSubclass(Deserializer.class);
+ }
+
/**
* getDeserializer
*
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Oct 17 17:40:05 2014
@@ -156,6 +156,7 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -190,6 +191,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
@@ -3983,7 +3985,7 @@ public class DDLTask extends Task<DDLWor
* @throws HiveException
* Throws this exception if an unexpected error occurs.
*/
- private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws HiveException {
+ private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws Exception {
// Get the existing table
Table oldtbl = db.getTable(crtTbl.getLikeTableName());
Table tbl;
@@ -4049,12 +4051,22 @@ public class DDLTask extends Task<DDLWor
tbl.unsetDataLocation();
}
+ Class<? extends Deserializer> serdeClass = oldtbl.getDeserializerClass();
+
Map<String, String> params = tbl.getParameters();
// We should copy only those table parameters that are specified in the config.
+ SerDeSpec spec = AnnotationUtils.getAnnotation(serdeClass, SerDeSpec.class);
String paramsStr = HiveConf.getVar(conf, HiveConf.ConfVars.DDL_CTL_PARAMETERS_WHITELIST);
+
+ Set<String> retainer = new HashSet<String>();
+ if (spec != null && spec.schemaProps() != null) {
+ retainer.addAll(Arrays.asList(spec.schemaProps()));
+ }
if (paramsStr != null) {
- List<String> paramsList = Arrays.asList(paramsStr.split(","));
- params.keySet().retainAll(paramsList);
+ retainer.addAll(Arrays.asList(paramsStr.split(",")));
+ }
+ if (!retainer.isEmpty()) {
+ params.keySet().retainAll(retainer);
} else {
params.clear();
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java Fri Oct 17 17:40:05 2014
@@ -27,8 +27,10 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
@@ -40,6 +42,7 @@ import org.apache.hadoop.io.Writable;
* A serde class for ORC.
* It transparently passes the object to/from the ORC file reader/writer.
*/
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class OrcSerde implements SerDe, VectorizedSerde {
private static final Log LOG = LogFactory.getLog(OrcSerde.class);
@@ -75,9 +78,9 @@ public class OrcSerde implements SerDe,
@Override
public void initialize(Configuration conf, Properties table) {
// Read the configuration parameters
- String columnNameProperty = table.getProperty("columns");
+ String columnNameProperty = table.getProperty(serdeConstants.LIST_COLUMNS);
// NOTE: if "columns.types" is missing, all columns will be of String type
- String columnTypeProperty = table.getProperty("columns.types");
+ String columnTypeProperty = table.getProperty(serdeConstants.LIST_COLUMN_TYPES);
// Parse the configuration parameters
ArrayList<String> columnNames = new ArrayList<String>();
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java Fri Oct 17 17:40:05 2014
@@ -23,9 +23,10 @@ import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -69,6 +70,7 @@ import parquet.io.api.Binary;
* A ParquetHiveSerDe for Hive (with the deprecated package mapred)
*
*/
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class ParquetHiveSerDe extends AbstractSerDe {
public static final Text MAP_KEY = new Text("key");
public static final Text MAP_VALUE = new Text("value");
@@ -105,8 +107,8 @@ public class ParquetHiveSerDe extends Ab
final List<String> columnNames;
final List<TypeInfo> columnTypes;
// Get column names and sort order
- final String columnNameProperty = tbl.getProperty(IOConstants.COLUMNS);
- final String columnTypeProperty = tbl.getProperty(IOConstants.COLUMNS_TYPES);
+ final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+ final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
if (columnNameProperty.length() == 0) {
columnNames = new ArrayList<String>();
@@ -242,7 +244,7 @@ public class ParquetHiveSerDe extends Ab
case BOOLEAN:
return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
case BYTE:
- return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
+ return new ByteWritable(((ByteObjectInspector) inspector).get(obj));
case DOUBLE:
return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
case FLOAT:
@@ -252,7 +254,7 @@ public class ParquetHiveSerDe extends Ab
case LONG:
return new LongWritable(((LongObjectInspector) inspector).get(obj));
case SHORT:
- return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
+ return new ShortWritable(((ShortObjectInspector) inspector).get(obj));
case STRING:
String v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(obj);
try {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Fri Oct 17 17:40:05 2014
@@ -34,7 +34,6 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -258,6 +257,10 @@ public class Table implements Serializab
return deserializer;
}
+ final public Class<? extends Deserializer> getDeserializerClass() throws Exception {
+ return MetaStoreUtils.getDeserializerClass(Hive.get().getConf(), tTable);
+ }
+
final public Deserializer getDeserializer(boolean skipConfError) {
if (deserializer == null) {
deserializer = getDeserializerFromMetaStore(skipConfError);
Modified: hive/trunk/ql/src/test/queries/clientpositive/create_like.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/create_like.q?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/create_like.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/create_like.q Fri Oct 17 17:40:05 2014
@@ -33,3 +33,32 @@ DROP TABLE table4;
CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
SELECT * FROM table4;
+
+CREATE TABLE doctors STORED AS AVRO TBLPROPERTIES ('avro.schema.literal'='{
+ "namespace": "testing.hive.avro.serde",
+ "name": "doctors",
+ "type": "record",
+ "fields": [
+ {
+ "name":"number",
+ "type":"int",
+ "doc":"Order of playing the role"
+ },
+ {
+ "name":"first_name",
+ "type":"string",
+ "doc":"first name of actor playing role"
+ },
+ {
+ "name":"last_name",
+ "type":"string",
+ "doc":"last name of actor playing role"
+ }
+ ]
+}');
+
+alter table doctors set tblproperties ('k1'='v1', 'k2'='v2');
+DESCRIBE FORMATTED doctors;
+
+CREATE TABLE doctors2 like doctors;
+DESCRIBE FORMATTED doctors2;
Modified: hive/trunk/ql/src/test/results/clientpositive/create_like.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/create_like.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/create_like.q.out Fri Oct 17 17:40:05 2014
@@ -265,3 +265,143 @@ POSTHOOK: Input: default@table4
4
5
6
+PREHOOK: query: CREATE TABLE doctors STORED AS AVRO TBLPROPERTIES ('avro.schema.literal'='{
+ "namespace": "testing.hive.avro.serde",
+ "name": "doctors",
+ "type": "record",
+ "fields": [
+ {
+ "name":"number",
+ "type":"int",
+ "doc":"Order of playing the role"
+ },
+ {
+ "name":"first_name",
+ "type":"string",
+ "doc":"first name of actor playing role"
+ },
+ {
+ "name":"last_name",
+ "type":"string",
+ "doc":"last name of actor playing role"
+ }
+ ]
+}')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@doctors
+POSTHOOK: query: CREATE TABLE doctors STORED AS AVRO TBLPROPERTIES ('avro.schema.literal'='{
+ "namespace": "testing.hive.avro.serde",
+ "name": "doctors",
+ "type": "record",
+ "fields": [
+ {
+ "name":"number",
+ "type":"int",
+ "doc":"Order of playing the role"
+ },
+ {
+ "name":"first_name",
+ "type":"string",
+ "doc":"first name of actor playing role"
+ },
+ {
+ "name":"last_name",
+ "type":"string",
+ "doc":"last name of actor playing role"
+ }
+ ]
+}')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@doctors
+PREHOOK: query: alter table doctors set tblproperties ('k1'='v1', 'k2'='v2')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@doctors
+PREHOOK: Output: default@doctors
+POSTHOOK: query: alter table doctors set tblproperties ('k1'='v1', 'k2'='v2')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@doctors
+POSTHOOK: Output: default@doctors
+PREHOOK: query: DESCRIBE FORMATTED doctors
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@doctors
+POSTHOOK: query: DESCRIBE FORMATTED doctors
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@doctors
+# col_name data_type comment
+
+number int from deserializer
+first_name string from deserializer
+last_name string from deserializer
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ COLUMN_STATS_ACCURATE false
+ avro.schema.literal {\n \"namespace\": \"testing.hive.avro.serde\",\n \"name\": \"doctors\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\":\"number\",\n \"type\":\"int\",\n \"doc\":\"Order of playing the role\"\n },\n {\n \"name\":\"first_name\",\n \"type\":\"string\",\n \"doc\":\"first name of actor playing role\"\n },\n {\n \"name\":\"last_name\",\n \"type\":\"string\",\n \"doc\":\"last name of actor playing role\"\n }\n ]\n}
+ k1 v1
+ k2 v2
+#### A masked pattern was here ####
+ numFiles 0
+ numRows -1
+ rawDataSize -1
+ totalSize 0
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.avro.AvroSerDe
+InputFormat: org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: CREATE TABLE doctors2 like doctors
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@doctors2
+POSTHOOK: query: CREATE TABLE doctors2 like doctors
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@doctors2
+PREHOOK: query: DESCRIBE FORMATTED doctors2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@doctors2
+POSTHOOK: query: DESCRIBE FORMATTED doctors2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@doctors2
+# col_name data_type comment
+
+number int from deserializer
+first_name string from deserializer
+last_name string from deserializer
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ avro.schema.literal {\n \"namespace\": \"testing.hive.avro.serde\",\n \"name\": \"doctors\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\":\"number\",\n \"type\":\"int\",\n \"doc\":\"Order of playing the role\"\n },\n {\n \"name\":\"first_name\",\n \"type\":\"string\",\n \"doc\":\"first name of actor playing role\"\n },\n {\n \"name\":\"last_name\",\n \"type\":\"string\",\n \"doc\":\"last name of actor playing role\"\n }\n ]\n}
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.avro.AvroSerDe
+InputFormat: org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java Fri Oct 17 17:40:05 2014
@@ -113,10 +113,6 @@ public abstract class AbstractSerDe impl
* @return The error messages in the configuration which are empty if no error occurred
*/
public String getConfigurationErrors() {
- if (configErrors == null || configErrors.isEmpty()) {
- return "";
- } else {
- return configErrors;
- }
+ return configErrors == null ? "" : configErrors;
}
}
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Fri Oct 17 17:40:05 2014
@@ -42,6 +42,11 @@ import org.apache.hadoop.io.Writable;
* MetadataTypedColumnsetSerDe.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.SERIALIZATION_FORMAT,
+ serdeConstants.SERIALIZATION_NULL_FORMAT,
+ serdeConstants.SERIALIZATION_LIB,
+ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST })
public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java Fri Oct 17 17:40:05 2014
@@ -49,6 +49,9 @@ import au.com.bytecode.opencsv.CSVWriter
* quote("), and escape characters(\) are the same as the opencsv library.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS,
+ OpenCSVSerde.SEPARATORCHAR, OpenCSVSerde.QUOTECHAR, OpenCSVSerde.ESCAPECHAR})
public final class OpenCSVSerde extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(OpenCSVSerde.class.getName());
@@ -69,7 +72,7 @@ public final class OpenCSVSerde extends
public void initialize(final Configuration conf, final Properties tbl) throws SerDeException {
final List<String> columnNames = Arrays.asList(tbl.getProperty(serdeConstants.LIST_COLUMNS)
- .split(","));
+ .split(","));
numCols = columnNames.size();
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Fri Oct 17 17:40:05 2014
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -70,10 +69,16 @@ import com.google.common.collect.Lists;
* writableStringObjectInspector. We should switch to that when we have a UTF-8
* based Regex library.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ RegexSerDe.INPUT_REGEX, RegexSerDe.INPUT_REGEX_CASE_SENSITIVE })
public class RegexSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName());
+ public static final String INPUT_REGEX = "input.regex";
+ public static final String INPUT_REGEX_CASE_SENSITIVE = "input.regex.case.insensitive";
+
int numColumns;
String inputRegex;
@@ -95,11 +100,11 @@ public class RegexSerDe extends Abstract
// We can get the table definition from tbl.
// Read the configuration parameters
- inputRegex = tbl.getProperty("input.regex");
+ inputRegex = tbl.getProperty(INPUT_REGEX);
String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(tbl
- .getProperty("input.regex.case.insensitive"));
+ .getProperty(INPUT_REGEX_CASE_SENSITIVE));
// output format string is not supported anymore, warn user of deprecation
if (null != tbl.getProperty("output.format.string")) {
Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeSpec.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeSpec.java?rev=1632639&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeSpec.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeSpec.java Fri Oct 17 17:40:05 2014
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+@Inherited
+public @interface SerDeSpec {
+ // property names needed to keep internal structure of serde
+ String[] schemaProps();
+}
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java Fri Oct 17 17:40:05 2014
@@ -26,8 +26,10 @@ import org.apache.avro.Schema;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -37,9 +39,18 @@ import org.apache.hadoop.io.Writable;
/**
* Read or write Avro data from Hive.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ AvroSerDe.LIST_COLUMN_COMMENTS, AvroSerDe.TABLE_NAME, AvroSerDe.TABLE_COMMENT,
+ AvroSerdeUtils.SCHEMA_LITERAL, AvroSerdeUtils.SCHEMA_URL,
+ AvroSerdeUtils.SCHEMA_NAMESPACE, AvroSerdeUtils.SCHEMA_NAME, AvroSerdeUtils.SCHEMA_DOC})
public class AvroSerDe extends AbstractSerDe {
private static final Log LOG = LogFactory.getLog(AvroSerDe.class);
+ public static final String TABLE_NAME = "name";
+ public static final String TABLE_COMMENT = "comment";
+ public static final String LIST_COLUMN_COMMENTS = "columns.comments";
+
public static final String DECIMAL_TYPE_NAME = "decimal";
public static final String CHAR_TYPE_NAME = "char";
public static final String VARCHAR_TYPE_NAME = "varchar";
@@ -59,8 +70,6 @@ public class AvroSerDe extends AbstractS
private AvroSerializer avroSerializer = null;
private boolean badSchema = false;
- private static String TABLE_NAME = "name";
- private static String TABLE_COMMENT = "comment";
@Override
public void initialize(Configuration configuration, Properties tableProperties,
@@ -81,9 +90,9 @@ public class AvroSerDe extends AbstractS
columnNames = null;
columnTypes = null;
- final String columnNameProperty = properties.getProperty("columns");
- final String columnTypeProperty = properties.getProperty("columns.types");
- final String columnCommentProperty = properties.getProperty("columns.comments");
+ final String columnNameProperty = properties.getProperty(serdeConstants.LIST_COLUMNS);
+ final String columnTypeProperty = properties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+ final String columnCommentProperty = properties.getProperty(LIST_COLUMN_COMMENTS);
if (properties.getProperty(AvroSerdeUtils.SCHEMA_LITERAL) != null
|| properties.getProperty(AvroSerdeUtils.SCHEMA_URL) != null
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Fri Oct 17 17:40:05 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.serde2.Byt
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -78,7 +79,6 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -110,6 +110,9 @@ import org.apache.hadoop.io.Writable;
* fields in the same top-level field will have the same sort order.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ serdeConstants.SERIALIZATION_SORT_ORDER})
public class BinarySortableSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class.getName());
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java Fri Oct 17 17:40:05 2014
@@ -27,9 +27,11 @@ import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -50,6 +52,14 @@ import org.apache.hadoop.io.Writable;
* (2) ColumnarSerDe initialize ColumnarStruct's field directly. But under the
* field level, it works like LazySimpleSerDe<br>
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+ serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+ serdeConstants.ESCAPE_CHAR,
+ serdeConstants.SERIALIZATION_ENCODING,
+ LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
public class ColumnarSerDe extends ColumnarSerDeBase {
@Override
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java Fri Oct 17 17:40:05 2014
@@ -22,8 +22,10 @@ import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryFactory;
@@ -44,6 +46,7 @@ import org.apache.hadoop.io.Writable;
* format and which is deserialized in a lazy, i.e. on-demand fashion.
*
*/
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class LazyBinaryColumnarSerDe extends ColumnarSerDeBase {
private List<String> columnNames;
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Fri Oct 17 17:40:05 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -50,6 +51,9 @@ import org.apache.thrift.transport.TIOSt
* DynamicSerDe.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.SERIALIZATION_DDL, serdeConstants.SERIALIZATION_FORMAT,
+ DynamicSerDe.META_TABLE_NAME})
public class DynamicSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName());
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Fri Oct 17 17:40:05 2014
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.serde2.lazy;
import java.io.IOException;
-import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -31,10 +30,10 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -64,6 +63,14 @@ import org.apache.hadoop.io.Writable;
* Also LazySimpleSerDe outputs typed columns instead of treating all columns as
* String like MetadataTypedColumnsetSerDe.
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+ serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+ serdeConstants.ESCAPE_CHAR,
+ serdeConstants.SERIALIZATION_ENCODING,
+ LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1632639&r1=1632638&r2=1632639&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Fri Oct 17 17:40:05 2014
@@ -27,14 +27,12 @@ import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Decimal128;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -76,6 +74,7 @@ import org.apache.hadoop.io.Writable;
* deserialized until required. Binary means a field is serialized in binary
* compact format.
*/
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
public class LazyBinarySerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class.getName());