Posted to derby-commits@db.apache.org by ma...@apache.org on 2010/06/17 08:04:51 UTC

svn commit: r955487 - in /db/derby/code/branches/10.5/java: engine/org/apache/derby/impl/sql/execute/ testing/org/apache/derbyTesting/functionTests/tests/lang/ testing/org/apache/derbyTesting/functionTests/tests/tools/

Author: mamta
Date: Thu Jun 17 06:04:50 2010
New Revision: 955487

URL: http://svn.apache.org/viewvc?rev=955487&view=rev
Log:
Backporting changes for DERBY-4677 from trunk into the 10.5 codeline. The check-in into trunk was revision 954544.
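
For context, DERBY-4677 covers unique constraints on nullable columns: when
SYSCS_UTIL.SYSCS_COMPRESS_TABLE or a bulk import rebuilt the backing index, the
rebuilt index lost its unique-with-duplicate-nulls property and duplicate
non-null values were accepted afterwards. The stand-alone JDBC sketch below
(connection URL, class name and table layout are illustrative, not part of this
commit) shows the scenario the new tests exercise; with the fix applied, the
second insert fails with SQLSTATE 23505.

    // Minimal sketch of the DERBY-4677 scenario; URL and names are illustrative.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class Derby4677Sketch {
        public static void main(String[] args) throws SQLException {
            Connection con = DriverManager.getConnection(
                    "jdbc:derby:memory:d4677;create=true");
            Statement stmt = con.createStatement();
            // A UNIQUE constraint on a nullable column is backed by a
            // unique-with-duplicate-nulls index.
            stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "
                    + "NAME2 INT UNIQUE NOT NULL, NAME3 INT PRIMARY KEY)");
            // Compressing the table rebuilds its indexes; before this fix the
            // rebuilt index for NAME1 dropped the unique nullable property.
            stmt.execute(
                    "call SYSCS_UTIL.SYSCS_COMPRESS_TABLE('APP', 'TABLE1', 1)");
            stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1, 11, 111)");
            try {
                // Duplicate value for NAME1: with the fix this fails with
                // SQLSTATE 23505; before the fix it was silently accepted.
                stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1, 22, 222)");
            } catch (SQLException e) {
                System.out.println("Expected duplicate-key error: "
                        + e.getSQLState());
            }
            stmt.close();
            con.close();
        }
    }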


Modified:
    db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java
    db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java
    db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java
    db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java

Modified: db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java
URL: http://svn.apache.org/viewvc/db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java?rev=955487&r1=955486&r2=955487&view=diff
==============================================================================
--- db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java (original)
+++ db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java Thu Jun 17 06:04:50 2010
@@ -2459,6 +2459,12 @@ class AlterTableConstantAction extends D
 			properties.put(
                 "nUniqueColumns", Integer.toString(indexRowLength));
 		}
+		if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+		{
+			properties.put(
+                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+		}
+
 		properties.put(
             "rowLocationColumn", Integer.toString(indexRowLength - 1));
 		properties.put(

Modified: db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java
URL: http://svn.apache.org/viewvc/db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java?rev=955487&r1=955486&r2=955487&view=diff
==============================================================================
--- db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java (original)
+++ db/derby/code/branches/10.5/java/engine/org/apache/derby/impl/sql/execute/InsertResultSet.java Thu Jun 17 06:04:50 2010
@@ -1856,6 +1856,11 @@ class InsertResultSet extends DMLWriteRe
 				properties.put("nUniqueColumns", 
 							   Integer.toString(indexRowLength));
 			}
+			if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+			{
+				properties.put(
+	                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+			}
 			properties.put("rowLocationColumn", 
 							Integer.toString(indexRowLength - 1));
 			properties.put("nKeyFields", Integer.toString(indexRowLength));
@@ -2330,6 +2335,11 @@ class InsertResultSet extends DMLWriteRe
 				properties.put("nUniqueColumns", 
 							   Integer.toString(indexRowLength));
 			}
+			if(cd.getIndexDescriptor().isUniqueWithDuplicateNulls())
+			{
+				properties.put(
+	                    "uniqueWithDuplicateNulls", Boolean.toString(true));
+			}
 			properties.put("rowLocationColumn", 
 							Integer.toString(indexRowLength - 1));
 			properties.put("nKeyFields", Integer.toString(indexRowLength));

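The hunks above all feed a java.util.Properties object that describes the index
conglomerate about to be recreated; the only change in this commit is the extra
"uniqueWithDuplicateNulls" key. The stand-alone sketch below shows what the
resulting map looks like for a single-column unique nullable index; the class
name and variable values are simplified stand-ins, only the property keys
mirror the real code.

    // Stand-alone illustration, not Derby code: the conglomerate properties
    // built when recreating a single-column unique nullable index.
    import java.util.Properties;

    public class IndexPropertiesSketch {
        public static void main(String[] args) {
            int indexRowLength = 2;  // one key column plus the row location
            boolean uniqueWithDuplicateNulls = true;  // e.g. NAME1 INT UNIQUE

            Properties properties = new Properties();
            // Mirrors the context lines in the hunks above.
            properties.put("nUniqueColumns", Integer.toString(indexRowLength));
            if (uniqueWithDuplicateNulls) {
                // The key added by this commit; without it the recreated index
                // no longer enforced uniqueness among non-null values, which is
                // the symptom behind DERBY-4677.
                properties.put("uniqueWithDuplicateNulls",
                        Boolean.toString(true));
            }
            properties.put("rowLocationColumn",
                    Integer.toString(indexRowLength - 1));
            properties.put("nKeyFields", Integer.toString(indexRowLength));
            properties.list(System.out);
        }
    }
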
Modified: db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java
URL: http://svn.apache.org/viewvc/db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java?rev=955487&r1=955486&r2=955487&view=diff
==============================================================================
--- db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java (original)
+++ db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/lang/NullableUniqueConstraintTest.java Thu Jun 17 06:04:50 2010
@@ -21,6 +21,7 @@
 
 package org.apache.derbyTesting.functionTests.tests.lang;
 
+import java.sql.CallableStatement;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -36,6 +37,7 @@ import junit.framework.TestResult;
 import junit.framework.TestSuite;
 
 import org.apache.derbyTesting.junit.BaseJDBCTestCase;
+import org.apache.derbyTesting.junit.SupportFilesSetup;
 import org.apache.derbyTesting.junit.TestConfiguration;
 
 /**
@@ -145,6 +147,30 @@ public class NullableUniqueConstraintTes
         stmt.close ();
         ps.close();
     }
+
+    /**
+     * Compress table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index.
+     * @throws SQLException
+     */
+    public void testDerby4677CompressTable() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        stmt.execute("call syscs_util.syscs_compress_table('APP','TABLE1',1)");
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        //the following insert should fail because of the unique constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //the following insert should fail because of the unique constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //the following insert should fail because of the primary key constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    }
     
     /**
      * Basic test of Unique Constraint using multipart part key.

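The new fixture can be run on its own through the Derby JUnit harness. A
minimal launcher along the following lines should work with junit.jar,
derbyTesting.jar and the engine jars on the classpath; the wrapper class is
hypothetical and assumes the usual static suite() entry point that Derby's
JUnit fixtures expose.

    // Hypothetical launcher; only NullableUniqueConstraintTest comes from the
    // repository, the wrapper class itself is illustrative.
    import junit.framework.Test;
    import junit.textui.TestRunner;
    import org.apache.derbyTesting.functionTests.tests.lang.NullableUniqueConstraintTest;

    public class RunDerby4677Tests {
        public static void main(String[] args) {
            // suite() is the conventional entry point of Derby's JUnit fixtures.
            Test suite = NullableUniqueConstraintTest.suite();
            TestRunner.run(suite);
        }
    }
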
Modified: db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java
URL: http://svn.apache.org/viewvc/db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java?rev=955487&r1=955486&r2=955487&view=diff
==============================================================================
--- db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java (original)
+++ db/derby/code/branches/10.5/java/testing/org/apache/derbyTesting/functionTests/tests/tools/ImportExportBinaryDataTest.java Thu Jun 17 06:04:50 2010
@@ -21,6 +21,7 @@
  */
 package org.apache.derbyTesting.functionTests.tests.tools;
 
+import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -124,6 +125,79 @@ public class ImportExportBinaryDataTest 
 	    doImportTable("APP", "BIN_TAB_IMP", fileName, null, null, null, 0);
         verifyData(" * ");
     }
+    
+    /**
+     * Bulk insert into a table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index.
+     * In the following test case, we have an empty table into which we
+     * import a file containing one row's worth of data. This combination
+     * used to cause the bulk insert code to recreate the index incorrectly
+     * for a unique nullable index, which allowed duplicate rows under the
+     * unique nullable index. The fix for DERBY-4677 resolves
+     * the issue.
+     * @throws SQLException
+     */
+    public void testDerby4677BulkInsertIntoEmptyTable() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        String dataFileName =
+            (SupportFilesSetup.getReadWrite("data_file.dat")).getPath();
+        doExportTable("APP", "TABLE1", dataFileName, null, null, "UTF-16");
+        stmt.executeUpdate("DELETE FROM TABLE1");
+        commit();
+        doImportTable("APP", "TABLE1", dataFileName, null, null, "UTF-16",0);
+        //the following insert should fail because of the unique constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //the following insert should fail because of the unique constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //the following insert should fail because of the primary key constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    	SupportFilesSetup.deleteFile(dataFileName);
+    }
+    
+    /**
+     * Bulk insert into a table should recreate the indexes correctly rather
+     * than ignoring the unique nullable property of the index.
+     * In the following test case, we have an empty table into which we
+     * import an empty file with the REPLACE option. This combination
+     * used to cause the bulk insert code to recreate the index incorrectly
+     * for a unique nullable index, which allowed duplicate rows under the
+     * unique nullable index. The fix for DERBY-4677 resolves
+     * the issue.
+     * @throws SQLException
+     */
+    public void testDerby4677BulkInsertWithReplace() throws SQLException {
+        Connection con = getConnection();
+        Statement stmt = con.createStatement();
+        stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "+
+        		"name2 int unique not null, name3 int primary key)");
+        String emptyFileName =
+            (SupportFilesSetup.getReadWrite("empty_file.dat")).getPath();
+        //there is no data in TABLE1, so empty_file.dat will be empty after
+        //the export; the export is used here just to create an empty file
+        doExportTable("APP", "TABLE1", emptyFileName, null, null, "UTF-16");
+        commit();
+        doImportTable("APP", "TABLE1", emptyFileName, null, null, "UTF-16",1);
+        stmt.executeUpdate("INSERT INTO TABLE1 VALUES(1,11,111)");
+        //the following insert should fail because of the unique constraint on name1
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(1,22,222)");
+        //the following insert should fail because of the unique constraint on name2
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(3,11,333)");
+        //the following insert should fail because of the primary key constraint on name3
+        assertStatementError("23505", stmt,
+        		"INSERT INTO TABLE1 VALUES(4,44,111)");
+        stmt.executeUpdate("DROP TABLE TABLE1");    
+    	SupportFilesSetup.deleteFile(emptyFileName);
+    }
 
     
     /*
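
A quick way to confirm that a rebuilt index kept its unique nullable property
is to look at the DESCRIPTOR column of SYS.SYSCONGLOMERATES after the compress
or import. The sketch below is illustrative only: the printed descriptor text
(e.g. "UNIQUE WITH DUPLICATE NULLS BTREE (1)") is the current toString() output
of the catalog object, not a documented or stable format.

    // Illustrative check, not part of this commit: print the index descriptors
    // of TABLE1 after a compress and inspect the one backing NAME1.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class CheckIndexDescriptors {
        public static void main(String[] args) throws SQLException {
            Connection con = DriverManager.getConnection(
                    "jdbc:derby:memory:d4677;create=true");
            Statement stmt = con.createStatement();
            stmt.executeUpdate("CREATE TABLE TABLE1(NAME1 INT UNIQUE, "
                    + "NAME2 INT UNIQUE NOT NULL, NAME3 INT PRIMARY KEY)");
            stmt.execute(
                    "call SYSCS_UTIL.SYSCS_COMPRESS_TABLE('APP', 'TABLE1', 1)");
            ResultSet rs = stmt.executeQuery(
                    "SELECT C.CONGLOMERATENAME, C.DESCRIPTOR"
                    + " FROM SYS.SYSCONGLOMERATES C, SYS.SYSTABLES T"
                    + " WHERE C.TABLEID = T.TABLEID AND T.TABLENAME = 'TABLE1'");
            while (rs.next()) {
                // The heap row prints a null descriptor; the index rows print
                // the index type, uniqueness and key column positions.
                System.out.println(rs.getString(1) + " -> " + rs.getObject(2));
            }
            rs.close();
            stmt.close();
            con.close();
        }
    }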