Posted to commits@hive.apache.org by ha...@apache.org on 2012/03/09 01:47:38 UTC

svn commit: r1298673 [4/4] - in /hive/trunk: hbase-handler/src/java/org/apache/hadoop/hive/hbase/ hbase-handler/src/test/org/apache/hadoop/hive/hbase/ hbase-handler/src/test/queries/ hbase-handler/src/test/results/ serde/src/java/org/apache/hadoop/hive...

Added: hive/trunk/hbase-handler/src/test/results/hbase_binary_storage_queries.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/hbase_binary_storage_queries.q.out?rev=1298673&view=auto
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/hbase_binary_storage_queries.q.out (added)
+++ hive/trunk/hbase-handler/src/test/results/hbase_binary_storage_queries.q.out Fri Mar  9 00:47:37 2012
@@ -0,0 +1,658 @@
+PREHOOK: query: DROP TABLE t_hbase
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE t_hbase(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive",
+               "hbase.table.default.storage.type" = "binary")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE t_hbase(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive",
+               "hbase.table.default.storage.type" = "binary")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: DESCRIBE FORMATTED t_hbase
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t_hbase
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+tinyint_col         	tinyint             	from deserializer   
+smallint_col        	smallint            	from deserializer   
+int_col             	int                 	from deserializer   
+bigint_col          	bigint              	from deserializer   
+float_col           	float               	from deserializer   
+double_col          	double              	from deserializer   
+boolean_col         	boolean             	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	hbase.table.default.storage.type	binary              
+	hbase.table.name    	t_hive              
+	storage_handler     	org.apache.hadoop.hive.hbase.HBaseStorageHandler
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.hbase.HBaseSerDe	 
+InputFormat:        	org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	hbase.columns.mapping	:key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-
+	serialization.format	1                   
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user1', 1, 1, 1, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user1', 1, 1, 1, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user2', 127, 32767, 2147483647, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user2', 127, 32767, 2147483647, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user3', -128, -32768, -2147483648, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user3', -128, -32768, -2147483648, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: SELECT * FROM t_hbase
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+user1	1	1	1	1	1.0	1.0	true
+user2	127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+user3	-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase
+WHERE key='user1' OR key='user2' OR key='user3'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase
+WHERE key='user1' OR key='user2' OR key='user3'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+1	1	1	1	1.0	1.0	true
+127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+0	0	0	0	11.139999389648438	2.6663574368565E8	3
+PREHOOK: query: DROP TABLE t_hbase_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE EXTERNAL TABLE t_hbase_1(key STRING,
+                                tinyint_col TINYINT,
+                                smallint_col SMALLINT,
+                                int_col INT,
+                                bigint_col BIGINT,
+                                float_col FLOAT,
+                                double_col DOUBLE,
+                                boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#b,cf:binarybyte#b,cf:binaryshort#b,cf:binaryint#b,cf:binarylong#b,cf:binaryfloat#b,cf:binarydouble#b,cf:binaryboolean#b")
+TBLPROPERTIES ("hbase.table.name" = "t_hive")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_1(key STRING,
+                                tinyint_col TINYINT,
+                                smallint_col SMALLINT,
+                                int_col INT,
+                                bigint_col BIGINT,
+                                float_col FLOAT,
+                                double_col DOUBLE,
+                                boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#b,cf:binarybyte#b,cf:binaryshort#b,cf:binaryint#b,cf:binarylong#b,cf:binaryfloat#b,cf:binarydouble#b,cf:binaryboolean#b")
+TBLPROPERTIES ("hbase.table.name" = "t_hive")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase_1
+PREHOOK: query: DESCRIBE FORMATTED t_hbase_1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t_hbase_1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+tinyint_col         	tinyint             	from deserializer   
+smallint_col        	smallint            	from deserializer   
+int_col             	int                 	from deserializer   
+bigint_col          	bigint              	from deserializer   
+float_col           	float               	from deserializer   
+double_col          	double              	from deserializer   
+boolean_col         	boolean             	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	EXTERNAL_TABLE      	 
+Table Parameters:	 	 
+	EXTERNAL            	TRUE                
+	hbase.table.name    	t_hive              
+	storage_handler     	org.apache.hadoop.hive.hbase.HBaseStorageHandler
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.hbase.HBaseSerDe	 
+InputFormat:        	org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	hbase.columns.mapping	:key#b,cf:binarybyte#b,cf:binaryshort#b,cf:binaryint#b,cf:binarylong#b,cf:binaryfloat#b,cf:binarydouble#b,cf:binaryboolean#b
+	serialization.format	1                   
+PREHOOK: query: SELECT * FROM t_hbase_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+user1	1	1	1	1	1.0	1.0	true
+user2	127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+user3	-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_1
+WHERE key='user1' OR key='user2' OR key='user3'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_1
+WHERE key='user1' OR key='user2' OR key='user3'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+1	1	1	1	1.0	1.0	true
+127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_1
+#### A masked pattern was here ####
+0	0	0	0	11.139999389648438	2.6663574368565E8	3
+PREHOOK: query: DROP TABLE t_hbase_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase_1
+PREHOOK: Output: default@t_hbase_1
+POSTHOOK: query: DROP TABLE t_hbase_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase_1
+POSTHOOK: Output: default@t_hbase_1
+PREHOOK: query: DROP TABLE t_hbase
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: DROP TABLE t_hbase
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: DROP TABLE t_hbase_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE t_hbase_2(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE t_hbase_2(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase_2
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user1', 1, 1, 1, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase_2
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user1', 1, 1, 1, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase_2
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user2', 127, 32767, 2147483647, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase_2
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user2', 127, 32767, 2147483647, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase_2
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user3', -128, -32768, -2147483648, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase_2
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase_2
+SELECT 'user3', -128, -32768, -2147483648, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase_2
+PREHOOK: query: SELECT * FROM t_hbase_2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+user1	1	1	1	1	1.0	1.0	true
+user2	127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+user3	-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_2
+WHERE key='user1' OR key='user2' OR key='user3'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_2
+WHERE key='user1' OR key='user2' OR key='user3'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+1	1	1	1	1.0	1.0	true
+127	32767	2147483647	9223372036854775807	211.31	2.687465320571E8	false
+-128	-32768	-2147483648	-9223372036854775808	-201.17	-2110789.37145	true
+PREHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_2
+#### A masked pattern was here ####
+0	0	0	0	11.139999389648438	2.6663574368565E8	3
+PREHOOK: query: DROP TABLE t_hbase_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE EXTERNAL TABLE t_hbase_3(key STRING,
+                                tinyint_col TINYINT,
+                                smallint_col SMALLINT,
+                                int_col INT,
+                                bigint_col BIGINT,
+                                float_col FLOAT,
+                                double_col DOUBLE,
+                                boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#b,cf:binarybyte#b,cf:binaryshort#b,cf:binaryint#b,cf:binarylong#b,cf:binaryfloat#b,cf:binarydouble#b,cf:binaryboolean#b")
+TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_3(key STRING,
+                                tinyint_col TINYINT,
+                                smallint_col SMALLINT,
+                                int_col INT,
+                                bigint_col BIGINT,
+                                float_col FLOAT,
+                                double_col DOUBLE,
+                                boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#b,cf:binarybyte#b,cf:binaryshort#b,cf:binaryint#b,cf:binarylong#b,cf:binaryfloat#b,cf:binarydouble#b,cf:binaryboolean#b")
+TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase_3
+PREHOOK: query: SELECT * FROM t_hbase_3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase_3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+user1	49	NULL	NULL	NULL	NULL	NULL	true
+user2	49	13106	842085431	4121411804481401392	1.0313938E-8	5.6030888442763564E-67	true
+user3	45	11571	758264116	3258690996568012594	1.0128829E-11	5.581687380553606E-91	true
+PREHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_3
+WHERE key='user1' OR key='user2' OR key='user3'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_3
+WHERE key='user1' OR key='user2' OR key='user3'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+49	NULL	NULL	NULL	NULL	NULL	true
+49	13106	842085431	4121411804481401392	1.0313938E-8	5.6030888442763564E-67	true
+45	11571	758264116	3258690996568012594	1.0128829E-11	5.581687380553606E-91	true
+PREHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_3
+#### A masked pattern was here ####
+143	24677	1600349547	7380102801049413986	1.0324066977186741E-8	5.6030888442763564E-67	3
+PREHOOK: query: DROP TABLE t_hbase_3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase_3
+PREHOOK: Output: default@t_hbase_3
+POSTHOOK: query: DROP TABLE t_hbase_3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase_3
+POSTHOOK: Output: default@t_hbase_3
+PREHOOK: query: DROP TABLE t_hbase_4
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase_4
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE EXTERNAL TABLE t_hbase_4(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES (
+"hbase.table.name" = "t_hive_2",
+"hbase.table.default.storage.type" = "binary")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_4(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#-,cf:binarybyte#-,cf:binaryshort#-,cf:binaryint#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES (
+"hbase.table.name" = "t_hive_2",
+"hbase.table.default.storage.type" = "binary")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase_4
+PREHOOK: query: SELECT * FROM t_hbase_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+user1	49	NULL	NULL	NULL	NULL	NULL	true
+user2	49	13106	842085431	4121411804481401392	1.0313938E-8	5.6030888442763564E-67	true
+user3	45	11571	758264116	3258690996568012594	1.0128829E-11	5.581687380553606E-91	true
+PREHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_4
+WHERE key='user1' OR key='user2' OR key='user3'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT tinyint_col,
+       smallint_col,
+       int_col,
+       bigint_col,
+       float_col,
+       double_col,
+       boolean_col
+FROM t_hbase_4
+WHERE key='user1' OR key='user2' OR key='user3'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+49	NULL	NULL	NULL	NULL	NULL	true
+49	13106	842085431	4121411804481401392	1.0313938E-8	5.6030888442763564E-67	true
+45	11571	758264116	3258690996568012594	1.0128829E-11	5.581687380553606E-91	true
+PREHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(tinyint_col),
+       sum(smallint_col),
+       sum(int_col),
+       sum(bigint_col),
+       sum(float_col),
+       sum(double_col),
+       count(boolean_col)
+FROM t_hbase_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase_4
+#### A masked pattern was here ####
+143	24677	1600349547	7380102801049413986	1.0324066977186741E-8	5.6030888442763564E-67	3
+PREHOOK: query: DROP TABLE t_hbase_4
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase_4
+PREHOOK: Output: default@t_hbase_4
+POSTHOOK: query: DROP TABLE t_hbase_4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase_4
+POSTHOOK: Output: default@t_hbase_4
+PREHOOK: query: DROP TABLE t_hbase_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase_2
+PREHOOK: Output: default@t_hbase_2
+POSTHOOK: query: DROP TABLE t_hbase_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase_2
+POSTHOOK: Output: default@t_hbase_2
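
Note (illustrative, not part of this commit): the odd-looking rows in the t_hbase_3 / t_hbase_4
results above (49, 13106, NULL, ...) come from writing through t_hbase_2's string ("#-") mapping
and then reading the same underlying HBase table t_hive_2 back through binary storage (the "#b"
column suffix in t_hbase_3, or "#-" plus hbase.table.default.storage.type=binary in t_hbase_4).
A minimal Java sketch of what the binary readers see for the user1 row; the class name is made up
for illustration, the behaviour is plain java.io:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class BinaryMappingOverStringCells {
      public static void main(String[] args) throws IOException {
        // What the string ("#-") mapping stored in user1's cf:binarybyte and cf:binaryshort
        // cells: the UTF-8 text "1".
        byte[] cell = "1".getBytes("UTF-8");

        // Read through "#b" as a tinyint: one byte is available, so readByte() succeeds but
        // returns the ASCII code of '1', i.e. 49 -- matching the t_hbase_3 output above.
        System.out.println(new DataInputStream(new ByteArrayInputStream(cell)).readByte());

        // Read through "#b" as a smallint: readShort() needs two bytes, only one is present,
        // so the read fails and the column surfaces as NULL (the LazyDio* classes added in
        // this commit set isNull when the read throws IOException).
        try {
          new DataInputStream(new ByteArrayInputStream(cell)).readShort();
        } catch (IOException e) {
          System.out.println("NULL");
        }
      }
    }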

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1298673&r1=1298672&r2=1298673&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Fri Mar  9 00:47:37 2012
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.serde2.lazy;
 
 import java.util.ArrayList;
@@ -36,9 +37,17 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioBoolean;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioByte;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioDouble;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioFloat;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioInteger;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioLong;
+import org.apache.hadoop.hive.serde2.lazydio.LazyDioShort;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -47,6 +56,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 
 /**
  * LazyFactory.
@@ -55,11 +65,32 @@ import org.apache.hadoop.io.Text;
 public final class LazyFactory {
 
   /**
+   * Create a lazy primitive object instance given a primitive object inspector based on its
+   * type. It takes a boolean switch to decide whether to return a binary or standard variant
+   * of the lazy object.
+   *
+   * @param poi PrimitiveObjectInspector
+   * @param typeBinary a switch to return either a LazyPrimitive class or its binary
+   *        companion
+   * @return LazyPrimitive<? extends ObjectInspector, ? extends Writable>
+   */
+  public static LazyPrimitive<? extends ObjectInspector, ? extends Writable>
+  createLazyPrimitiveClass(PrimitiveObjectInspector poi, boolean typeBinary) {
+    if (typeBinary) {
+      return createLazyPrimitiveBinaryClass(poi);
+    } else {
+      return createLazyPrimitiveClass(poi);
+    }
+  }
+
+  /**
    * Create a lazy primitive class given the type name.
    */
-  public static LazyPrimitive<?, ?> createLazyPrimitiveClass(
-      PrimitiveObjectInspector oi) {
+  public static LazyPrimitive<? extends ObjectInspector, ? extends Writable>
+  createLazyPrimitiveClass(PrimitiveObjectInspector oi) {
+
     PrimitiveCategory p = oi.getPrimitiveCategory();
+
     switch (p) {
     case BOOLEAN:
       return new LazyBoolean((LazyBooleanObjectInspector) oi);
@@ -86,10 +117,35 @@ public final class LazyFactory {
     }
   }
 
+  public static LazyPrimitive<? extends ObjectInspector, ? extends Writable>
+  createLazyPrimitiveBinaryClass(PrimitiveObjectInspector poi) {
+
+    PrimitiveCategory pc = poi.getPrimitiveCategory();
+
+    switch (pc) {
+    case BOOLEAN:
+      return new LazyDioBoolean((LazyBooleanObjectInspector) poi);
+    case BYTE:
+      return new LazyDioByte((LazyByteObjectInspector) poi);
+    case SHORT:
+      return new LazyDioShort((LazyShortObjectInspector) poi);
+    case INT:
+      return new LazyDioInteger((LazyIntObjectInspector) poi);
+    case LONG:
+      return new LazyDioLong((LazyLongObjectInspector) poi);
+    case FLOAT:
+      return new LazyDioFloat((LazyFloatObjectInspector) poi);
+    case DOUBLE:
+      return new LazyDioDouble((LazyDoubleObjectInspector) poi);
+    default:
+      throw new RuntimeException("Hive Internal Error: no LazyObject for " + poi);
+    }
+  }
+
   /**
    * Create a hierarchical LazyObject based on the given typeInfo.
    */
-  public static LazyObject createLazyObject(ObjectInspector oi) {
+  public static LazyObject<? extends ObjectInspector> createLazyObject(ObjectInspector oi) {
     ObjectInspector.Category c = oi.getCategory();
     switch (c) {
     case PRIMITIVE:
@@ -108,9 +164,28 @@ public final class LazyFactory {
   }
 
   /**
+   * Creates a LazyObject based on the given ObjectInspector. Binary variants are created for
+   * primitive objects when the switch <code>typeBinary</code> is specified as true.
+   *
+   * @param oi ObjectInspector
+   * @param typeBinary boolean value used as a switch to return variants of LazyPrimitive
+   *                   objects which are initialized from data stored in a binary format.
+   * @return LazyObject<? extends ObjectInspector>
+   */
+  public static LazyObject<? extends ObjectInspector>
+  createLazyObject(ObjectInspector oi, boolean typeBinary) {
+
+    if (oi.getCategory() == Category.PRIMITIVE) {
+      return createLazyPrimitiveClass((PrimitiveObjectInspector) oi, typeBinary);
+    } else {
+      return createLazyObject(oi);
+    }
+  }
+
+  /**
    * Create a hierarchical ObjectInspector for LazyObject with the given
    * typeInfo.
-   * 
+   *
    * @param typeInfo
    *          The type information for the LazyObject
    * @param separator
@@ -180,7 +255,7 @@ public final class LazyFactory {
   /**
    * Create a hierarchical ObjectInspector for LazyStruct with the given
    * columnNames and columnTypeInfos.
-   * 
+   *
    * @param lastColumnTakesRest
    *          whether the last column of the struct should take the rest of the
    *          row if there are extra fields.
@@ -205,7 +280,7 @@ public final class LazyFactory {
   /**
    * Create a hierarchical ObjectInspector for ColumnarStruct with the given
    * columnNames and columnTypeInfos.
-   * 
+   *
    * @see LazyFactory#createLazyObjectInspector(TypeInfo, byte[], int, Text,
    *      boolean, byte)
    */

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyObject.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyObject.java?rev=1298673&r1=1298672&r2=1298673&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyObject.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyObject.java Fri Mar  9 00:47:37 2012
@@ -21,17 +21,17 @@ import org.apache.hadoop.hive.serde2.obj
 
 /**
  * LazyObject stores an object in a range of bytes in a byte[].
- * 
+ *
  * A LazyObject can represent any primitive object or hierarchical object like
  * array, map or struct.
  */
 public abstract class LazyObject<OI extends ObjectInspector> extends LazyObjectBase {
 
-  OI oi;
+  protected OI oi;
 
   /**
    * Create a LazyObject.
-   * 
+   *
    * @param oi
    *          Derived classes can access meta information about this Lazy Object
    *          (e.g, separator, nullSequence, escaper) from it.

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java?rev=1298673&r1=1298672&r2=1298673&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java Fri Mar  9 00:47:37 2012
@@ -32,17 +32,17 @@ public abstract class LazyPrimitive<OI e
     extends LazyObject<OI> {
 
   private static final Log LOG = LogFactory.getLog(LazyPrimitive.class);
-  LazyPrimitive(OI oi) {
+  protected LazyPrimitive(OI oi) {
     super(oi);
   }
 
-  LazyPrimitive(LazyPrimitive<OI, T> copy) {
+  protected LazyPrimitive(LazyPrimitive<OI, T> copy) {
     super(copy.oi);
     isNull = copy.isNull;
   }
 
-  T data;
-  boolean isNull = false;
+  protected T data;
+  protected boolean isNull = false;
 
   /**
    * Returns the primitive object represented by this LazyObject. This is useful

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1298673&r1=1298672&r2=1298673&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Fri Mar  9 00:47:37 2012
@@ -15,8 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.serde2.lazy;
 
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
@@ -239,6 +241,69 @@ public final class LazyUtils {
     }
   }
 
+  /**
+   * Write out a binary representation of a PrimitiveObject to a byte stream.
+   *
+   * @param out ByteStream.Output, an unsynchronized version of ByteArrayOutputStream, used as a
+   *            backing buffer for the DataOutputStream
+   * @param o the PrimitiveObject
+   * @param oi the PrimitiveObjectInspector
+   * @throws IOException on error during the write operation
+   */
+  public static void writePrimitive(
+      OutputStream out,
+      Object o,
+      PrimitiveObjectInspector oi) throws IOException {
+
+    DataOutputStream dos = new DataOutputStream(out);
+
+    try {
+      switch (oi.getPrimitiveCategory()) {
+      case BOOLEAN:
+        boolean b = ((BooleanObjectInspector) oi).get(o);
+        dos.writeBoolean(b);
+        break;
+
+      case BYTE:
+        byte bt = ((ByteObjectInspector) oi).get(o);
+        dos.writeByte(bt);
+        break;
+
+      case SHORT:
+        short s = ((ShortObjectInspector) oi).get(o);
+        dos.writeShort(s);
+        break;
+
+      case INT:
+        int i = ((IntObjectInspector) oi).get(o);
+        dos.writeInt(i);
+        break;
+
+      case LONG:
+        long l = ((LongObjectInspector) oi).get(o);
+        dos.writeLong(l);
+        break;
+
+      case FLOAT:
+        float f = ((FloatObjectInspector) oi).get(o);
+        dos.writeFloat(f);
+        break;
+
+      case DOUBLE:
+        double d = ((DoubleObjectInspector) oi).get(o);
+        dos.writeDouble(d);
+        break;
+
+      default:
+        throw new RuntimeException("Hive internal error.");
+      }
+    } finally {
+      // closing the underlying ByteStream should have no effect, the data should still be
+      // accessible
+      dos.close();
+    }
+  }
+
   public static int hashBytes(byte[] data, int start, int len) {
     int hash = 1;
     for (int i = start; i < len; i++) {
@@ -287,5 +352,4 @@ public final class LazyUtils {
   private LazyUtils() {
     // prevent instantiation
   }
-
 }
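
Note (illustrative, not part of this commit): writePrimitive() above emits the standard
java.io.DataOutput big-endian encodings, which is what the LazyDio* readers added below expect.
A small round-trip sketch; it assumes PrimitiveObjectInspectorFactory.javaIntObjectInspector, a
long-standing serde2 constant -- verify against your checkout:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class WritePrimitiveRoundTrip {
      public static void main(String[] args) throws IOException {
        // writePrimitive accepts any OutputStream; closing the DataOutputStream inside it is
        // a no-op for a ByteArrayOutputStream, so the buffer stays readable afterwards.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        LazyUtils.writePrimitive(out, Integer.valueOf(7),
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);

        // Read it back the same way LazyDioInteger.init() does.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
        System.out.println(in.readInt());   // expected: 7
      }
    }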

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioBoolean.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioBoolean.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioBoolean.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioBoolean.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
+import org.apache.hadoop.io.BooleanWritable;
+
+/**
+ * LazyDioBoolean stores a boolean value as a BooleanWritable. This class complements
+ * LazyBoolean; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the boolean value stored in the default binary format.
+ */
+public class LazyDioBoolean extends LazyPrimitive<LazyBooleanObjectInspector, BooleanWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioBoolean(LazyBooleanObjectInspector oi) {
+    super(oi);
+    data = new BooleanWritable();
+  }
+
+  public LazyDioBoolean(LazyDioBoolean copy) {
+    super(copy);
+    data = new BooleanWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyBoolean-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    boolean value = false;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readBoolean();
+      data.set(value);
+      isNull = false;
+    } catch (IOException e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}
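
Note (illustrative, not part of this commit): DataInputStream.readBoolean() decodes any non-zero
byte as true. That is why boolean_col comes back as true for every row of t_hbase_3 / t_hbase_4
in the test output earlier in this commit, even though user2 stored false: under the string
mapping the cell holds the text "false", whose first byte ('f', 0x66) is non-zero. A one-line
check in plain java.io:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class BooleanCellCheck {
      public static void main(String[] args) throws IOException {
        byte[] cell = "false".getBytes("UTF-8");   // what the string ("#-") mapping stored
        // Any non-zero first byte decodes as true, so 'f' (0x66) -> prints "true".
        System.out.println(new DataInputStream(new ByteArrayInputStream(cell)).readBoolean());
      }
    }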

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioByte.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioByte.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioByte.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioByte.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyByteObjectInspector;
+
+/**
+ * LazyDioByte stores a byte value as a ByteWritable. This class complements
+ * LazyByte; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the raw byte value stored.
+ */
+public class LazyDioByte extends LazyPrimitive<LazyByteObjectInspector, ByteWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioByte(LazyByteObjectInspector oi) {
+    super(oi);
+    data = new ByteWritable();
+  }
+
+  public LazyDioByte(LazyDioByte copy) {
+    super(copy);
+    data = new ByteWritable(copy.data.get());
+  }
+
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    byte value = 0;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readByte();
+      data.set(value);
+      isNull = false;
+    } catch (Exception e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioDouble.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioDouble.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioDouble.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioDouble.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+
+/**
+ * LazyDioDouble stores a double value as a DoubleWritable. This class complements
+ * LazyDouble; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the double value stored in the default binary format.
+ */
+public class LazyDioDouble extends LazyPrimitive<LazyDoubleObjectInspector, DoubleWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioDouble(LazyDoubleObjectInspector oi) {
+    super(oi);
+    data = new DoubleWritable();
+  }
+
+  LazyDioDouble(LazyDioDouble copy) {
+    super(copy);
+    data = new DoubleWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyDouble-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    double value = 0.0;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readDouble();
+      data.set(value);
+      isNull = false;
+    } catch (IOException e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioFloat.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioFloat.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioFloat.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioFloat.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+import org.apache.hadoop.io.FloatWritable;
+
+/**
+ * LazyDioFloat stores a float value as a FloatWritable. This class complements
+ * LazyFloat; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the float value stored in the default binary format.
+ */
+public class LazyDioFloat extends LazyPrimitive<LazyFloatObjectInspector, FloatWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioFloat(LazyFloatObjectInspector oi) {
+    super(oi);
+    data = new FloatWritable();
+  }
+
+  public LazyDioFloat(LazyDioFloat copy) {
+    super(copy);
+    data = new FloatWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyFloat-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    float value = 0.0F;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readFloat();
+      data.set(value);
+      isNull = false;
+    } catch (IOException e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioInteger.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioInteger.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioInteger.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioInteger.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
+import org.apache.hadoop.io.IntWritable;
+
+/**
+ * LazyDioInteger stores an int value as an IntWritable. This class complements
+ * LazyInteger; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the integer value stored in the default binary format.
+ */
+public class LazyDioInteger extends LazyPrimitive<LazyIntObjectInspector, IntWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioInteger(LazyIntObjectInspector oi) {
+    super(oi);
+    data = new IntWritable();
+  }
+
+  public LazyDioInteger(LazyDioInteger copy) {
+    super(copy);
+    data = new IntWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyInteger-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    int value = 0;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readInt();
+      data.set(value);
+      isNull = false;
+    } catch (IOException e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioLong.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioLong.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioLong.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioLong.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * LazyDioLong stores a long value as a LongWritable. This class complements
+ * LazyLong; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the long value stored in the default binary format.
+ */
+public class LazyDioLong extends LazyPrimitive<LazyLongObjectInspector, LongWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioLong(LazyLongObjectInspector oi) {
+    super(oi);
+    data = new LongWritable();
+  }
+
+  public LazyDioLong(LazyDioLong copy) {
+    super(copy);
+    data = new LongWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyLong-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    long value = 0;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readLong();
+      data.set(value);
+      isNull = false;
+    } catch (IOException e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioShort.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioShort.java?rev=1298673&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioShort.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazydio/LazyDioShort.java Fri Mar  9 00:47:37 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazydio;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
+
+/**
+ * LazyDioShort stores a short value as a ShortWritable. This class complements
+ * LazyShort; its primary difference is the {@link #init(ByteArrayRef, int, int)} method, which
+ * reads the short value stored in the default binary format.
+ */
+public class LazyDioShort extends LazyPrimitive<LazyShortObjectInspector, ShortWritable> {
+
+  private ByteStream.Input in;
+  private DataInputStream din;
+
+  public LazyDioShort(LazyShortObjectInspector oi) {
+    super(oi);
+    data = new ShortWritable();
+  }
+
+  public LazyDioShort(LazyDioShort copy) {
+    super(copy);
+    data = new ShortWritable(copy.data.get());
+  }
+
+  /* (non-Javadoc)
+   * This provides a LazyShort-like class which can be initialized from data stored in a
+   * binary format.
+   *
+   * @see org.apache.hadoop.hive.serde2.lazy.LazyObject#init
+   *        (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef, int, int)
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+
+    short value = 0;
+
+    try {
+      in = new ByteStream.Input(bytes.getData(), start, length);
+      din = new DataInputStream(in);
+      value = din.readShort();
+      data.set(value);
+      isNull = false;
+    } catch (Exception e) {
+      isNull = true;
+    } finally {
+      try {
+        din.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+      try {
+        in.close();
+      } catch (IOException e) {
+        // swallow exception
+      }
+    }
+  }
+}