Posted to commits@hive.apache.org by he...@apache.org on 2009/12/22 04:40:45 UTC

svn commit: r893065 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientposit...

Author: heyongqiang
Date: Tue Dec 22 03:40:44 2009
New Revision: 893065

URL: http://svn.apache.org/viewvc?rev=893065&view=rev
Log:
HIVE-977 NullPointerException when operating on a non-existing partition
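
In short: querying a partitioned table that has no partitions yet leaves MapOperator with a null partSpec, leaves GenMapRedUtils without a partition descriptor for the alias, and lets partitionDesc.clone() call putAll with a null partSpec. The diffs below add a null guard at each of those three spots, so such a query now returns an empty result instead of throwing.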

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullinput2.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=893065&r1=893064&r2=893065&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Dec 22 03:40:44 2009
@@ -345,6 +345,9 @@
     HIVE-913 no error if user specifies same output table multiple times
     (namit via He Yongqiang)
 
+    HIVE-977 NullPointerException when operating on a non-existing partition
+    (namit via He Yongqiang)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=893065&r1=893064&r2=893065&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Tue Dec 22 03:40:44 2009
@@ -203,7 +203,11 @@
       for(int i = 0; i < partKeys.length; i++ ) {
         String key = partKeys[i];
         partNames.add(key);
-        partValues[i] = new Text(partSpec.get(key));
+        // Partitions do not exist for this table
+        if (partSpec == null)
+          partValues[i] = new Text();
+        else
+          partValues[i] = new Text(partSpec.get(key));
         partObjectInspectors.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
       }
       StructObjectInspector partObjectInspector = ObjectInspectorFactory

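For reference, a minimal self-contained sketch of the same guard. The class and method names (PartValuesSketch, partitionValues) are hypothetical, not Hive code, and plain String stands in for org.apache.hadoop.io.Text:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PartValuesSketch {

        // Mirrors the fixed loop above: when the table has no partitions,
        // partSpec is null, so emit an empty value for each partition key
        // instead of dereferencing the null map.
        static String[] partitionValues(String[] partKeys, Map<String, String> partSpec) {
            String[] partValues = new String[partKeys.length];
            for (int i = 0; i < partKeys.length; i++) {
                if (partSpec == null) {
                    partValues[i] = "";                        // new Text() in MapOperator
                } else {
                    partValues[i] = partSpec.get(partKeys[i]); // new Text(partSpec.get(key))
                }
            }
            return partValues;
        }

        public static void main(String[] args) {
            String[] keys = { "ds" };
            // Before the fix, a null partSpec here was the NullPointerException.
            System.out.println(Arrays.toString(partitionValues(keys, null)));
            Map<String, String> spec = new LinkedHashMap<String, String>();
            spec.put("ds", "101");
            System.out.println(Arrays.toString(partitionValues(keys, spec)));
        }
    }

The first call prints an empty value where the unguarded loop would have thrown.
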
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=893065&r1=893064&r2=893065&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Tue Dec 22 03:40:44 2009
@@ -430,6 +430,10 @@
       throw new SemanticException(e.getMessage(), e);
     }
 
+    // The table does not have any partitions
+    if (aliasPartnDesc == null)
+      aliasPartnDesc = new partitionDesc(Utilities.getTableDesc(parseCtx.getTopToTable().get(topOp)), null);
+
     plan.getAliasToPartnInfo().put(alias_id, aliasPartnDesc);
 
     for (Partition part : parts) {

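The shape of that fallback, as a hedged standalone sketch — the stub classes below model only the two fields needed and are not Hive's tableDesc/partitionDesc:

    import java.util.LinkedHashMap;

    class TableDescStub { }

    class PartitionDescStub {
        final TableDescStub table;
        final LinkedHashMap<String, String> partSpec; // null: table-level info only

        PartitionDescStub(TableDescStub table, LinkedHashMap<String, String> partSpec) {
            this.table = table;
            this.partSpec = partSpec;
        }
    }

    public class FallbackSketch {
        // Mirrors the guard above: if no partition descriptor could be built
        // for the alias (the table has no partitions), substitute one that
        // carries only the table descriptor and a null partition spec.
        static PartitionDescStub resolve(PartitionDescStub aliasPartnDesc, TableDescStub tableDesc) {
            return (aliasPartnDesc != null)
                ? aliasPartnDesc
                : new PartitionDescStub(tableDesc, null);
        }

        public static void main(String[] args) {
            PartitionDescStub desc = resolve(null, new TableDescStub());
            System.out.println("partSpec == null? " + (desc.partSpec == null)); // true
        }
    }

Downstream code then gets a real descriptor either way; the MapOperator guard above is what makes the null partSpec inside it safe to use.
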
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java?rev=893065&r1=893064&r2=893065&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java Tue Dec 22 03:40:44 2009
@@ -33,21 +33,21 @@
 public class partitionDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 2L;
 	private tableDesc table;
-  private java.util.LinkedHashMap<String, String> partSpec;  
+  private java.util.LinkedHashMap<String, String> partSpec;
   private java.lang.Class<? extends  org.apache.hadoop.hive.serde2.Deserializer> deserializerClass;
   private Class<? extends InputFormat> inputFileFormatClass;
   private Class<? extends HiveOutputFormat> outputFileFormatClass;
   private java.util.Properties properties;
   private String serdeClassName;
-  
+
   public partitionDesc() { }
-  
+
   public partitionDesc(
     final tableDesc table,
     final java.util.LinkedHashMap<String, String> partSpec) {
     this(table, partSpec, null, null, null, null, null);
   }
-  
+
   public partitionDesc(
   		final tableDesc table,
   		final java.util.LinkedHashMap<String, String> partSpec,
@@ -65,7 +65,7 @@
 		if (properties != null)
 			this.serdeClassName = properties.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
   }
-  
+
   public partitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)  throws HiveException{
   	this.table = Utilities.getTableDesc(part.getTable());
   	this.partSpec = part.getSpec();
@@ -83,7 +83,7 @@
   public void setTableDesc(final tableDesc table) {
     this.table = table;
   }
-  
+
   @explain(displayName="partition values")
   public java.util.LinkedHashMap<String, String> getPartSpec() {
     return this.partSpec;
@@ -91,23 +91,23 @@
   public void setPartSpec(final java.util.LinkedHashMap<String, String> partSpec) {
     this.partSpec=partSpec;
   }
-  
+
   public java.lang.Class<? extends  org.apache.hadoop.hive.serde2.Deserializer> getDeserializerClass() {
 		if (this.deserializerClass == null && this.table !=null)
 			setDeserializerClass(this.table.getDeserializerClass());
     return this.deserializerClass;
   }
-  
+
   public void setDeserializerClass(final java.lang.Class<? extends  org.apache.hadoop.hive.serde2.Deserializer> serdeClass) {
     this.deserializerClass = serdeClass;
   }
-  
+
   public Class<? extends InputFormat> getInputFileFormatClass() {
   	if (this.inputFileFormatClass == null && this.table !=null)
   		setInputFileFormatClass (this.table.getInputFileFormatClass());
     return this.inputFileFormatClass;
   }
-  
+
   /**
    * Return a deserializer object corresponding to the tableDesc
    */
@@ -116,28 +116,28 @@
     de.initialize(null, properties);
     return de;
   }
-  
+
   public void setInputFileFormatClass(final Class<? extends InputFormat> inputFileFormatClass) {
     this.inputFileFormatClass=inputFileFormatClass;
   }
-  
+
   public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
   	if (this.outputFileFormatClass == null && this.table !=null)
   		setOutputFileFormatClass( this.table.getOutputFileFormatClass());
     return this.outputFileFormatClass;
   }
-  
+
   public void setOutputFileFormatClass(final Class<?> outputFileFormatClass) {
     this.outputFileFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(outputFileFormatClass);
   }
-  
+
   @explain(displayName="properties", normalExplain=false)
   public java.util.Properties getProperties() {
     if(this.table !=null)
       return this.table.getProperties();
     return this.properties;
   }
-  
+
   public void setProperties(final java.util.Properties properties) {
     this.properties = properties;
   }
@@ -156,22 +156,22 @@
   public void setSerdeClassName(String serdeClassName) {
     this.serdeClassName = serdeClassName;
   }
-  
+
   @explain(displayName="name")
   public String getTableName() {
     return getProperties().getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
   }
-  
+
   @explain(displayName="input format")
   public String getInputFileFormatClassName() {
     return getInputFileFormatClass().getName();
   }
-  
+
   @explain(displayName="output format")
   public String getOutputFileFormatClassName() {
     return getOutputFileFormatClass().getName();
   }
-  
+
   public partitionDesc clone() {
   	partitionDesc ret = new partitionDesc();
 
@@ -181,7 +181,7 @@
     ret.outputFileFormatClass = this.outputFileFormatClass;
     if(this.properties != null) {
       Properties newProp = new Properties();
-      Enumeration<Object> keysProp = properties.keys(); 
+      Enumeration<Object> keysProp = properties.keys();
       while (keysProp.hasMoreElements()) {
         Object key = keysProp.nextElement();
         newProp.put(key, properties.get(key));
@@ -189,8 +189,11 @@
       ret.setProperties(newProp);
     }
   	ret.table = (tableDesc)this.table.clone();
-  	ret.partSpec = new java.util.LinkedHashMap<String, String>();
-  	ret.partSpec.putAll(this.partSpec);
+    // The partition spec is not present
+    if (this.partSpec != null) {
+      ret.partSpec = new java.util.LinkedHashMap<String, String>();
+      ret.partSpec.putAll(this.partSpec);
+    }
   	return ret;
   }
 }

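The clone() change is the same pattern one more time. A minimal sketch, assuming a hypothetical SpecHolder class in place of partitionDesc:

    import java.util.LinkedHashMap;

    public class SpecHolder implements Cloneable {
        LinkedHashMap<String, String> partSpec; // null for an unpartitioned table

        @Override
        public SpecHolder clone() {
            SpecHolder ret = new SpecHolder();
            // Deep-copy the spec only when it exists; the old code's
            // unconditional putAll(this.partSpec) threw on null.
            if (this.partSpec != null) {
                ret.partSpec = new LinkedHashMap<String, String>(this.partSpec);
            }
            return ret;
        }
    }

The LinkedHashMap copy constructor here is equivalent to the new-then-putAll in the diff; the point is only that the copy is skipped when there is nothing to copy.
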
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullinput2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullinput2.q?rev=893065&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullinput2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullinput2.q Tue Dec 22 03:40:44 2009
@@ -0,0 +1,8 @@
+drop table nulltbl;
+
+create table nulltbl(key int) partitioned by (ds string);
+select key from nulltbl where ds='101';
+
+select count(1) from nulltbl where ds='101';
+
+drop table nulltbl;

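Both statements probe the non-existent partition ds='101' of a table that has no partitions at all; before this patch they died with the NullPointerException, and the expected output below shows them succeeding, with count(1) returning 0.
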
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out?rev=893065&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out Tue Dec 22 03:40:44 2009
@@ -0,0 +1,27 @@
+PREHOOK: query: drop table nulltbl
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table nulltbl
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table nulltbl(key int) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table nulltbl(key int) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@nulltbl
+PREHOOK: query: select key from nulltbl where ds='101'
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1897224698/10000
+POSTHOOK: query: select key from nulltbl where ds='101'
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1897224698/10000
+PREHOOK: query: select count(1) from nulltbl where ds='101'
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1803910053/10000
+POSTHOOK: query: select count(1) from nulltbl where ds='101'
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1803910053/10000
+0
+PREHOOK: query: drop table nulltbl
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table nulltbl
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@nulltbl