Posted to oak-commits@jackrabbit.apache.org by re...@apache.org on 2017/12/01 15:52:50 UTC

svn commit: r1816880 - in /jackrabbit/oak/trunk/oak-store-document/src: main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java

Author: reschke
Date: Fri Dec  1 15:52:50 2017
New Revision: 1816880

URL: http://svn.apache.org/viewvc?rev=1816880&view=rev
Log:
OAK-7019: RDBDocumentStore: refactor table upgrade code

Modified:
    jackrabbit/oak/trunk/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
    jackrabbit/oak/trunk/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java

Modified: jackrabbit/oak/trunk/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java?rev=1816880&r1=1816879&r2=1816880&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java (original)
+++ jackrabbit/oak/trunk/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java Fri Dec  1 15:52:50 2017
@@ -31,7 +31,6 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
@@ -72,7 +71,8 @@ import org.apache.jackrabbit.oak.plugins
 import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
 import org.apache.jackrabbit.oak.plugins.document.DocumentStoreStatsCollector;
 import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
-import org.apache.jackrabbit.oak.plugins.document.UpdateOp;import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
+import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
+import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
 import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
 import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
 import org.apache.jackrabbit.oak.plugins.document.cache.CacheChangesTracker;
@@ -925,7 +925,7 @@ public class RDBDocumentStore implements
         return sqlType == Types.VARBINARY || sqlType == Types.BINARY || sqlType == Types.LONGVARBINARY;
     }
 
-    private void obtainFlagsFromResultSetMeta(ResultSetMetaData met, RDBTableMetaData tmd) throws SQLException {
+    private static void obtainFlagsFromResultSetMeta(ResultSetMetaData met, RDBTableMetaData tmd) throws SQLException {
 
         for (int i = 1; i <= met.getColumnCount(); i++) {
             String lcName = met.getColumnName(i).toLowerCase(Locale.ENGLISH);
@@ -967,7 +967,7 @@ public class RDBDocumentStore implements
         }
     }
 
-    private String dumpIndexData(DatabaseMetaData met, ResultSetMetaData rmet, String tableName) {
+    private static String dumpIndexData(DatabaseMetaData met, ResultSetMetaData rmet, String tableName) {
 
         ResultSet rs = null;
         try {
@@ -1022,7 +1022,7 @@ public class RDBDocumentStore implements
         }
     }
 
-    private void getIndexInformation(ResultSet rs, String rmetSchemaName, Map<String, Map<String, Object>> indices)
+    private static void getIndexInformation(ResultSet rs, String rmetSchemaName, Map<String, Map<String, Object>> indices)
             throws SQLException {
         while (rs.next()) {
             String name = asQualifiedDbName(rs.getString(5), rs.getString(6));
@@ -1052,7 +1052,9 @@ public class RDBDocumentStore implements
     }
 
     private void createTableFor(Connection con, Collection<? extends Document> col, RDBTableMetaData tmd, List<String> tablesCreated,
-            List<String> tablesPresent, StringBuilder diagnostics, int initialSchema, int upgradeToSchema) throws SQLException {
+            List<String> tablesPresent, StringBuilder overallDiagnostics, int initialSchema, int upgradeToSchema) throws SQLException {
+        StringBuilder diagnostics = new StringBuilder(); 
+
         String dbname = this.dbInfo.toString();
         if (con.getMetaData().getURL() != null) {
             dbname += " (" + con.getMetaData().getURL() + ")";
@@ -1111,6 +1113,9 @@ public class RDBDocumentStore implements
                 }
             }
 
+            closeResultSet(checkResultSet);
+            boolean dbWasChanged = false;
+
             if (!hasVersionColumn && upgradeToSchema >= 1) {
                 for (String upStatement1 : this.dbInfo.getTableUpgradeStatements(tableName, 1)) {
                     try {
@@ -1119,6 +1124,7 @@ public class RDBDocumentStore implements
                         upgradeStatement.close();
                         con.commit();
                         LOG.info("Upgraded " + tableName + " to DB level 1 using '" + upStatement1 + "'");
+                        dbWasChanged = true;
                     } catch (SQLException exup) {
                         con.rollback();
                         LOG.info("Attempted to upgrade " + tableName + " to DB level 1 using '" + upStatement1
@@ -1128,13 +1134,15 @@ public class RDBDocumentStore implements
             }
 
             tablesPresent.add(tableName);
+
+            if (dbWasChanged) {
+                diagnostics.setLength(0);
+                getTableMetaData(con, col, tmd, diagnostics);
+            }
         } catch (SQLException ex) {
             // table does not appear to exist
             con.rollback();
 
-            PreparedStatement checkStatement2 = null;
-            ResultSet checkResultSet2 = null;
-
             try {
                 creatStatement = con.createStatement();
                 creatStatement.execute(this.dbInfo.getTableCreationStatement(tableName, initialSchema));
@@ -1166,30 +1174,13 @@ public class RDBDocumentStore implements
 
                 tablesCreated.add(tableName);
 
-                checkStatement2 = con.prepareStatement("select * from " + tableName + " where ID = ?");
-                checkStatement2.setString(1, "0:/");
-                checkResultSet2 = checkStatement2.executeQuery();
-                // try to discover size of DATA column and binary-ness of ID
-                ResultSetMetaData met = checkResultSet2.getMetaData();
-                obtainFlagsFromResultSetMeta(met, tmd);
-
-                if (col == Collection.NODES) {
-                    String tableInfo = RDBJDBCTools.dumpResultSetMeta(met);
-                    diagnostics.append(tableInfo);
-                    String indexInfo = dumpIndexData(con.getMetaData(), met, tableName);
-                    if (!indexInfo.isEmpty()) {
-                        diagnostics.append(" ").append(indexInfo);
-                    }
-                }
+                diagnostics.setLength(0);
+                getTableMetaData(con, col, tmd, diagnostics);
             }
             catch (SQLException ex2) {
                 LOG.error("Failed to create table " + tableName + " in " + dbname, ex2);
                 throw ex2;
             }
-            finally {
-                closeResultSet(checkResultSet2);
-                closeStatement(checkStatement2);
-            }
         }
         finally {
             closeResultSet(checkResultSet);
@@ -1197,6 +1188,35 @@ public class RDBDocumentStore implements
             closeStatement(creatStatement);
             closeStatement(upgradeStatement);
         }
+
+        overallDiagnostics.append(diagnostics);
+    }
+
+    private static void getTableMetaData(Connection con, Collection<? extends Document> col, RDBTableMetaData tmd,
+            StringBuilder diagnostics) throws SQLException {
+        Statement checkStatement = null;
+        ResultSet checkResultSet = null;
+
+        try {
+            checkStatement = con.createStatement();
+            checkResultSet = checkStatement.executeQuery("select * from " + tmd.getName() + " where ID = '0'");
+
+            // try to discover size of DATA column and binary-ness of ID
+            ResultSetMetaData met = checkResultSet.getMetaData();
+            obtainFlagsFromResultSetMeta(met, tmd);
+
+            if (col == Collection.NODES) {
+                String tableInfo = RDBJDBCTools.dumpResultSetMeta(met);
+                diagnostics.append(tableInfo);
+                String indexInfo = dumpIndexData(con.getMetaData(), met, tmd.getName());
+                if (!indexInfo.isEmpty()) {
+                    diagnostics.append(" ").append(indexInfo);
+                }
+            }
+        } finally {
+            closeResultSet(checkResultSet);
+            closeStatement(checkStatement);
+        }
     }
 
     @Override

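Note: the commit above extracts the inline "probe the table and record diagnostics" logic from createTableFor into the static helper getTableMetaData, and re-runs that helper whenever the schema upgrade actually changed the table. The following is a minimal standalone sketch of that probe pattern, not the Oak code itself; the class and method names (TableMetaDataProbe, describeTable) are placeholders, and only the "select * ... where ID = '0'" probe query and the use of ResultSetMetaData are taken from the diff.

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class TableMetaDataProbe {

        // Re-read column metadata after a DDL change by running a cheap probe
        // query and inspecting the ResultSetMetaData, similar in spirit to the
        // getTableMetaData helper introduced in the commit above.
        static String describeTable(Connection con, String tableName) throws SQLException {
            try (Statement st = con.createStatement();
                 ResultSet rs = st.executeQuery("select * from " + tableName + " where ID = '0'")) {
                ResultSetMetaData met = rs.getMetaData();
                StringBuilder sb = new StringBuilder();
                for (int i = 1; i <= met.getColumnCount(); i++) {
                    if (i > 1) {
                        sb.append(", ");
                    }
                    sb.append(met.getColumnName(i)).append(' ').append(met.getColumnTypeName(i));
                }
                return sb.toString();
            }
        }
    }

Calling describeTable(connection, "NODES") once before and once after an ALTER TABLE would show the added VERSION column in the second result, which is essentially what the refactored createTableFor does when dbWasChanged is true.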
Modified: jackrabbit/oak/trunk/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java?rev=1816880&r1=1816879&r2=1816880&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java (original)
+++ jackrabbit/oak/trunk/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreSchemaUpgradeTest.java Fri Dec  1 15:52:50 2017
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertTrue;
 
 import java.util.ArrayList;
+import java.util.Collections;
 
 import javax.sql.DataSource;
 
@@ -28,7 +29,9 @@ import org.apache.jackrabbit.oak.commons
 import org.apache.jackrabbit.oak.plugins.document.Collection;
 import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
 import org.apache.jackrabbit.oak.plugins.document.DocumentStoreFixture;
+import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
 import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.RDBTableMetaData;
+import org.apache.jackrabbit.oak.plugins.document.util.Utils;
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -101,6 +104,30 @@ public class RDBDocumentStoreSchemaUpgra
     }
 
     @Test
+    public void init0then1() {
+        RDBOptions op = new RDBOptions().tablePrefix("T0T1").initialSchema(0).upgradeToSchema(0).dropTablesOnClose(true);
+        RDBDocumentStore rdb0 = null;
+        RDBDocumentStore rdb1 = null;
+        try {
+            rdb0 = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), op);
+            RDBTableMetaData meta0 = rdb0.getTable(Collection.NODES);
+            assertFalse(meta0.hasVersion());
+            rdb1 = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), new RDBOptions().tablePrefix("T0T1").initialSchema(0).upgradeToSchema(1));
+            RDBTableMetaData meta1 = rdb1.getTable(Collection.NODES);
+            assertTrue(meta1.hasVersion());
+            UpdateOp testInsert = new UpdateOp(Utils.getIdFromPath("/foo"), true);
+            assertTrue(rdb1.create(Collection.NODES, Collections.singletonList(testInsert)));
+        } finally {
+            if (rdb1 != null) {
+                rdb1.dispose();
+            }
+            if (rdb0 != null) {
+                rdb0.dispose();
+            }
+        }
+    }
+
+    @Test
     public void init01fail() {
         LogCustomizer logCustomizer = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.INFO)
                 .contains("Attempted to upgrade").create();
@@ -119,7 +146,10 @@ public class RDBDocumentStoreSchemaUpgra
             assertFalse(meta.hasVersion());
             assertEquals("unexpected # of log entries: " + logCustomizer.getLogs(), RDBDocumentStore.getTableNames().size(),
                     logCustomizer.getLogs().size());
+            UpdateOp testInsert = new UpdateOp(Utils.getIdFromPath("/foo"), true);
+            assertTrue(rdb.create(Collection.NODES, Collections.singletonList(testInsert)));
         } finally {
+            wds.setFailAlterTableAddColumnStatements(false);
             logCustomizer.finished();
             if (rdb != null) {
                 rdb.dispose();