Posted to derby-commits@db.apache.org by ma...@apache.org on 2010/06/14 18:29:09 UTC

svn commit: r954544 - in /db/derby/code/trunk/java: engine/org/apache/derby/impl/sql/execute/ testing/org/apache/derbyTesting/functionTests/tests/lang/ testing/org/apache/derbyTesting/functionTests/tests/tools/

Author: mamta
Date: Mon Jun 14 16:29:09 2010
New Revision: 954544

URL: http://svn.apache.org/viewvc?rev=954544&view=rev
Log:
DERBY-4677

We were not transferring the unique nullable property from the system catalog to the store layer during compress table and bulk insert. Because of that, after those operations the store allowed duplicate rows, corrupting the database. With this checkin we transfer the unique nullable property from the SQL layer to the store layer. The system catalogs had the correct information, but it was not passed to the store during btree recreation for compress table and bulk insert operations.
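
For illustration, a minimal standalone reproduction of the compress table scenario, distilled from the new NullableUniqueConstraintTest case below (the in-memory database URL, class name, and table name are illustrative, not part of this checkin):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class Derby4677Repro {
        public static void main(String[] args) throws SQLException {
            Connection con = DriverManager.getConnection(
                    "jdbc:derby:memory:reproDb;create=true");
            Statement stmt = con.createStatement();
            // UNIQUE on a nullable column is backed by a "unique with
            // duplicate nulls" btree.
            stmt.executeUpdate("CREATE TABLE T1(NAME1 INT UNIQUE)");
            // Compress recreates the btree; before this fix the rebuilt
            // index silently lost its unique nullable property.
            stmt.execute(
                "CALL SYSCS_UTIL.SYSCS_COMPRESS_TABLE('APP', 'T1', 1)");
            stmt.executeUpdate("INSERT INTO T1 VALUES(1)");
            try {
                stmt.executeUpdate("INSERT INTO T1 VALUES(1)");
                System.out.println("BUG: duplicate row accepted");
            } catch (SQLException e) {
                // Expected after the fix: SQLState 23505, duplicate key.
                System.out.println("OK: " + e.getSQLState());
            }
            con.close();
        }
    }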


Modified:
    db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java
    db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java
    db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java
    db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java

Modified: db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java
URL: http://svn.apache.org/viewvc/db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java?rev=954544&r1=954543&r2=954544&view=diff
==============================================================================
--- db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java (original)
+++ db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java Mon Jun 14 16:29:09 2010
@@ -2458,6 +2458,12 @@ class AlterTableConstantAction extends D
 			properties.put(
                 "nUniqueColumns", Integer.toString(indexRowLength));
 		}
+		if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+		{
+			properties.put(
+                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+		}
+
 		properties.put(
             "rowLocationColumn", Integer.toString(indexRowLength - 1));
 		properties.put(

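For context, here is the complete property set the fixed code now hands to the store when it recreates the backing index, collected into one sketch. The method and parameter names are illustrative; the keys and values mirror the diff above, with the flag standing in for the cd.getIndexDescriptor().isUniqueWithDuplicateNulls() check:

    import java.util.Properties;

    final class IndexRebuildProps {
        // Sketch only: indexRowLength and uniqueWithDuplicateNulls stand in
        // for values the constant action reads from the IndexDescriptor.
        static Properties forRebuild(int indexRowLength,
                                     boolean uniqueWithDuplicateNulls) {
            Properties properties = new Properties();
            // Context line from the diff: a non-unique backing index keys
            // on all columns, including the trailing row location column.
            properties.put("nUniqueColumns",
                           Integer.toString(indexRowLength));
            if (uniqueWithDuplicateNulls) {
                // The DERBY-4677 fix: without this flag the rebuilt btree
                // dropped the unique nullable semantics.
                properties.put("uniqueWithDuplicateNulls",
                               Boolean.toString(true));
            }
            properties.put("rowLocationColumn",
                           Integer.toString(indexRowLength - 1));
            properties.put("nKeyFields",
                           Integer.toString(indexRowLength));
            return properties;
        }
    }
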
Modified: db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java
URL: http://svn.apache.org/viewvc/db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java?rev=954544&r1=954543&r2=954544&view=diff
==============================================================================
--- db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java (original)
+++ db/derby/code/trunk/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java Mon Jun 14 16:29:09 2010
@@ -1869,6 +1869,11 @@ class InsertResultSet extends DMLWriteRe
 				properties.put("nUniqueColumns", 
 							   Integer.toString(indexRowLength));
 			}
+			if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+			{
+				properties.put(
+	                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+			}
 			properties.put("rowLocationColumn", 
 							Integer.toString(indexRowLength - 1));
 			properties.put("nKeyFields", Integer.toString(indexRowLength));
@@ -2343,6 +2348,11 @@ class InsertResultSet extends DMLWriteRe
 				properties.put("nUniqueColumns", 
 							   Integer.toString(indexRowLength));
 			}
+			if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+			{
+				properties.put(
+	                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+			}
 			properties.put("rowLocationColumn", 
 							Integer.toString(indexRowLength - 1));
 			properties.put("nKeyFields", Integer.toString(indexRowLength));

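Both changed hunks sit on the bulk insert path, which the import procedure exercises. A hedged usage example of triggering that path (the data file name is hypothetical; the procedure is the standard SYSCS_UTIL.SYSCS_IMPORT_TABLE, as used in the tests below):

    import java.sql.CallableStatement;
    import java.sql.Connection;
    import java.sql.DriverManager;

    public class BulkInsertImport {
        public static void main(String[] args) throws Exception {
            Connection con = DriverManager.getConnection(
                    "jdbc:derby:memory:importDb;create=true");
            con.createStatement().executeUpdate(
                "CREATE TABLE T1(NAME1 INT UNIQUE, NAME2 INT UNIQUE NOT NULL)");
            // REPLACE mode (last argument = 1) bulk-reloads the data and
            // recreates every index; this is one of the two cases where
            // the unique nullable flag used to be lost.
            CallableStatement cs = con.prepareCall(
                "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(?, ?, ?, ?, ?, ?, ?)");
            cs.setString(1, "APP");
            cs.setString(2, "T1");
            cs.setString(3, "t1_data.dat"); // hypothetical export file
            cs.setString(4, null);          // default column delimiter
            cs.setString(5, null);          // default character delimiter
            cs.setString(6, "UTF-16");
            cs.setInt(7, 1);                // 1 = replace existing data
            cs.execute();
            con.close();
        }
    }
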
Modified: db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java
URL: http://svn.apache.org/viewvc/db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java?rev=954544&r1=954543&r2=954544&view=diff
==============================================================================
--- db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java (original)
+++ db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java Mon Jun 14 16:29:09 2010
@@ -21,6 +21,7 @@
 
 package org.apache.derbyTesting.functionTests.tests.lang;
 
+import java.sql.CallableStatement;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -36,6 +37,7 @@ import junit.framework.TestResult;
 import junit.framework.TestSuite;
 
 import org.apache.derbyTesting.junit.BaseJDBCTestCase;
+import org.apache.derbyTesting.junit.SupportFilesSetup;
 import org.apache.derbyTesting.junit.TestConfiguration;
 
 /**
@@ -145,6 +147,30 @@ public class NullableUniqueConstraintTes
         stmt.close ();
         ps.close();
     }
+
+    /**
+     * Compress table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index
+     * @throws SQLException
+     */
+    public void testDerby4677CompressTable() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        stmt.execute("call syscs_util.syscs_compress_table('APP','TABLE1',1)");
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        //following should run into problem because of constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //following should run into problem because of constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //following should run into problem because of constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    }
     
     /**
      * Basic test of Unique Constraint using multipart part key.

Modified: db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java
URL: http://svn.apache.org/viewvc/db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java?rev=954544&r1=954543&r2=954544&view=diff
==============================================================================
--- db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java (original)
+++ db/derby/code/trunk/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java Mon Jun 14 16:29:09 2010
@@ -21,6 +21,7 @@
  */
 package org.apache.derbyTesting.functionTests.tests.tools;
 
+import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -124,6 +125,79 @@ public class ImportExportBinaryDataTest 
 	    doImportTable("APP", "BIN_TAB_IMP", fileName, null, null, null, 0);
         verifyData(" * ");
     }
+    
+    /**
+     * Bulk insert into a table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index.
+     * In the following test case, we have an empty table in which we are
+     * 	trying to do an import from a file with one row's worth of data.
+     * 	This combination used to cause bulk insert functionality to 
+     * 	recreate index incorrectly for unique nullable index. This allowed
+     * 	duplicate rows for unique nullable index. Fix for DERBY-4677 resolves
+     * 	the issue.
+     * @throws SQLException
+     */
+    public void testDerby4677BulkInsertIntoEmptyTable() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        String dataFileName =
+            (SupportFilesSetup.getReadWrite("data_file.dat")).getPath();
+        doExportTable("APP", "TABLE1", dataFileName, null, null, "UTF-16");
+        stmt.executeUpdate("DELETE FROM TABLE1");
+        commit();
+        doImportTable("APP", "TABLE1", dataFileName, null, null, "UTF-16",0);
+        //following should run into problem because of constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //following should run into problem because of constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //following should run into problem because of constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    	SupportFilesSetup.deleteFile(dataFileName);
+    }
+    
+    /**
+     * Bulk insert into a table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index.
+     * In the following test case, we have an empty table in which we are
+     * 	trying to do an import from an empty file with the REPLACE option.
+     * 	This combination used to cause bulk insert functionality to 
+     * 	recreate index incorrectly for unique nullable index. This allowed
+     * 	duplicate rows for unique nullable index. Fix for DERBY-4677 resolves
+     * 	the issue.
+     * @throws SQLException
+     */
+    public void testDerby4677BulkInsertWithReplace() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        String emptyFileName =
+            (SupportFilesSetup.getReadWrite("empty_file.dat")).getPath();
+        //there is no data in TABLE1 so empty_file.dat will be empty 
+        //after export. Using following to just create an empty file
+        doExportTable("APP", "TABLE1", emptyFileName, null, null, "UTF-16");
+        commit();
+        doImportTable("APP", "TABLE1", emptyFileName, null, null, "UTF-16",1);
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        //following should run into problem because of constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //following should run into problem because of constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //following should run into problem because of constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    	SupportFilesSetup.deleteFile(emptyFileName);
+    }
 
     
     /*