Posted to oak-commits@jackrabbit.apache.org by re...@apache.org on 2015/06/15 13:25:11 UTC

svn commit: r1685552 - in /jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document: AbstractDocumentStoreTest.java BasicDocumentStoreTest.java DocumentStorePerformanceTest.java rdb/RDBDocumentStorePerformanceTest.java

Author: reschke
Date: Mon Jun 15 11:25:10 2015
New Revision: 1685552

URL: http://svn.apache.org/r1685552
Log:
OAK-2982: BasicDocumentStoreTest: separate actual unit tests from performance tests
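
With the performance tests moved into the new *PerformanceTest classes, the fast functional tests and the timing-oriented tests can be run independently. Assuming the standard Maven Surefire setup of the oak-core module (the commands below are an illustrative sketch, not part of this commit), that would look roughly like:

    # run only the functional document store tests
    mvn test -pl oak-core -Dtest=BasicDocumentStoreTest

    # run the performance tests explicitly when needed
    mvn test -pl oak-core -Dtest=DocumentStorePerformanceTest,RDBDocumentStorePerformanceTest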

Added:
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java   (with props)
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java   (with props)
Modified:
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java

Modified: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java?rev=1685552&r1=1685551&r2=1685552&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java Mon Jun 15 11:25:10 2015
@@ -93,4 +93,19 @@ public abstract class AbstractDocumentSt
 
         return result;
     }
+
+    /**
+     * Generate a random string of given size, with or without non-ASCII characters.
+     */
+    public static String generateString(int length, boolean asciiOnly) {
+        char[] s = new char[length];
+        for (int i = 0; i < length; i++) {
+            if (asciiOnly) {
+                s[i] = (char) (32 + (int) (95 * Math.random()));
+            } else {
+                s[i] = (char) (32 + (int) ((0xd7ff - 32) * Math.random()));
+            }
+        }
+        return new String(s);
+    }
 }

Modified: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java?rev=1685552&r1=1685551&r2=1685552&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java Mon Jun 15 11:25:10 2015
@@ -24,22 +24,12 @@ import static org.junit.Assert.assertTru
 import static org.junit.Assert.fail;
 
 import java.io.UnsupportedEncodingException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.UUID;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Condition;
 import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
 import org.apache.jackrabbit.oak.plugins.document.util.Utils;
@@ -47,6 +37,9 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 public class BasicDocumentStoreTest extends AbstractDocumentStoreTest {
 
     private static final Logger LOG = LoggerFactory.getLogger(BasicDocumentStoreTest.class);
@@ -448,274 +441,6 @@ public class BasicDocumentStoreTest exte
         return result;
     }
 
-    @Test
-    public void testCreatePerfSmall() {
-        createPerf(16, 1);
-    }
-
-    @Test
-    public void testCreatePerfSmallBatch() {
-        createPerf(16, 64);
-    }
-
-    @Test
-    public void testCreatePerfBig() {
-        createPerf(32 * 1024, 1);
-    }
-
-    private void createPerf(int size, int amount) {
-        String pval = generateString(size, true);
-        long duration = 1000;
-        long end = System.currentTimeMillis() + duration;
-        long cnt = 0;
-        List<String> ids = new ArrayList<String>();
-
-        while (System.currentTimeMillis() < end) {
-            List<UpdateOp> ups = new ArrayList<UpdateOp>();
-            for (int i = 0; i < amount; i++) {
-                String id = this.getClass().getName() + ".testCreatePerf-" + size + "-" + cnt + "-" + i;
-                UpdateOp up = new UpdateOp(id, true);
-                up.set("_id", id);
-                up.set("foo", pval);
-                ups.add(up);
-                ids.add(id);
-            }
-            boolean success = super.ds.create(Collection.NODES, ups);
-            removeMe.addAll(ids);
-            assertTrue("documents with " + ids + " not created", success);
-            cnt += 1;
-        }
-
-        LOG.info("document creation with property of size " + size + " and batch size " + amount + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
-    }
-
-    @Test
-    public void testPerfCollectionPaging() {
-        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPaging", false);
-    }
-
-    @Test
-    public void testPerfCollectionPagingUnCached() {
-        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPagingUnCached", true);
-    }
-
-    private void testPerfCollectionPaging(String name, boolean invalidateCache) {
-        String cid = name;
-        int nodecount = 20000;
-        int initialFetchCount = 100;
-        int maxFetchCount = 1600;
-        int fetchcount = initialFetchCount;
-        long duration = 2000;
-        int cnt = 0;
-        List<UpdateOp> ups = new ArrayList<UpdateOp>();
-
-        UpdateOp container = new UpdateOp(cid, true);
-        container.set("_id", cid);
-        ups.add(container);
-        removeMe.add(cid);
-        for (int i = 0; i < nodecount; i++) {
-            String id = String.format("%s/%08d", cid, i);
-            removeMe.add(id);
-            UpdateOp u = new UpdateOp(id, true);
-            u.set("_id", id);
-            ups.add(u);
-        }
-
-        boolean success = super.ds.create(Collection.NODES, ups);
-        assertTrue(success);
-        super.ds.invalidateCache();
-
-        long end = System.currentTimeMillis() + duration;
-        String sid = cid;
-        int found = 0;
-        while (System.currentTimeMillis() < end) {
-            long now = System.currentTimeMillis();
-            List<NodeDocument> result = super.ds.query(Collection.NODES, sid, cid + "X", fetchcount);
-            if (super.ds.getCacheStats() != null && result.size() > 0) {
-                // check freshness of returned documents
-                long created = result.get(0).getLastCheckTime();
-                assertTrue(
-                        "'getLastCheckTime' timestamp of NodeDocument too old (" + created + " vs " + now + ") (on " + super.dsname + ")",
-                        created >= now);
-            }
-            found += result.size();
-            if (result.size() < fetchcount) {
-                if (sid.equals(cid)) {
-                    fail("first page must not be empty");
-                }
-                sid = cid;
-                assertEquals(nodecount, found);
-                found = 0;
-                fetchcount = initialFetchCount;
-            }
-            else {
-                sid = result.get(result.size() -1).getId();
-                if (fetchcount < maxFetchCount) {
-                    fetchcount *= 2;
-                }
-            }
-            cnt += 1;
-            if (invalidateCache) {
-                super.ds.invalidateCache();
-            }
-        }
-
-        LOG.info("collection lookups " + (invalidateCache ? "(uncached) " : "") + super.dsname + " was " + cnt + " in " + duration
-                + "ms (" + (cnt / (duration / 1000f)) + "/s)");
-    }
-
-    @Test
-    public void testPerfLastRevBatch() {
-        String bid = this.getClass().getName() + ".testPerfLastRevBatch";
-        int nodecount = 100;
-        long duration = 5000;
-        int cnt = 0;
-        List<String> ids = new ArrayList<String>();
-        Revision cr = Revision.fromString("r0-0-1");
-
-        // create test nodes
-        for (int i = 0; i < nodecount; i++) {
-            String id = bid + "-" + i;
-            super.ds.remove(Collection.NODES, id);
-            removeMe.add(id);
-            UpdateOp up = new UpdateOp(id, true);
-            up.set("_id", id);
-            up.set("testprop", generateString(100 * i, true));
-            up.setMapEntry("_lastRev", cr, "setup");
-            up.set("_modified", NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
-            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
-            assertTrue("creation failed for " + id + " in " + super.dsname, success);
-            ids.add(id);
-        }
-
-        long end = System.currentTimeMillis() + duration;
-        while (System.currentTimeMillis() < end) {
-            UpdateOp up = new UpdateOp(bid, true);
-            up.setMapEntry("_lastRev", cr, "iteration-" + cnt);
-            up.max("_modified", NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
-            super.ds.update(Collection.NODES, ids, up);
-            cnt += 1;
-        }
-
-        // check postcondition
-        super.ds.invalidateCache();
-        for (int i = 0; i < nodecount; i++) {
-            NodeDocument d = super.ds.find(Collection.NODES, bid + "-" + i);
-            assertNotNull(d);
-            Map<Revision, String> m = (Map<Revision, String>)d.get("_lastRev");
-            assertEquals("iteration-" + (cnt - 1), m.get(cr));
-        }
-
-        LOG.info("batch update for _lastRev for " + super.dsname + " was "
-                + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
-    }
-
-    @Test
-    public void testPerfReadBigDocCached() {
-        perfReadBigDoc(true, this.getClass().getName() + ".testReadBigDocCached");
-    }
-
-    @Test
-    public void testPerfReadBigDocAfterInvalidate() {
-        perfReadBigDoc(false, this.getClass().getName() + ".testReadBigDocAfterInvalidate");
-    }
-
-    private void perfReadBigDoc(boolean cached, String name) {
-        String id = name;
-        long duration = 1000;
-        int cnt = 0;
-
-        super.ds.remove(Collection.NODES, Collections.singletonList(id));
-        UpdateOp up = new UpdateOp(id, true);
-        up.set("_id", id);
-        for (int i = 0; i < 100; i++) {
-            up.set("foo" + i, generateString(1024, true));
-        }
-        assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
-        removeMe.add(id);
-
-        long end = System.currentTimeMillis() + duration;
-        while (System.currentTimeMillis() < end) {
-            if (!cached) {
-                super.ds.invalidateCache(Collection.NODES, id);
-            }
-            NodeDocument d = super.ds.find(Collection.NODES, id, 10);
-            cnt += 1;
-        }
-
-        LOG.info("big doc read " + (cached ? "" : "(after invalidate) ") + "from " + super.dsname + " was " + cnt + " in "
-                + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
-    }
-
-    @Test
-    public void testUpdatePerfSmall() {
-        updatePerf(16, false);
-    }
-
-    @Test
-    public void testUpdatePerfSmallGrowing() {
-        updatePerf(16, true);
-    }
-
-    @Test
-    public void testUpdatePerfBig() {
-        updatePerf(32 * 1024, false);
-    }
-
-    private void updatePerf(int size, boolean growing) {
-        String pval = generateString(size, true);
-        long duration = 1000;
-        long end = System.currentTimeMillis() + duration;
-        long cnt = 0;
-        Set<Revision> expectedRevs = new HashSet<Revision>();
-
-        String id = this.getClass().getName() + ".testUpdatePerf" + (growing ? "Growing" : "") + "-" + size;
-        removeMe.add(id);
-
-        while (System.currentTimeMillis() < end) {
-            UpdateOp up = new UpdateOp(id, true);
-            up.set("_id", id);
-            if (growing) {
-                Revision r = new Revision(System.currentTimeMillis(), (int) cnt, 1);
-                up.setMapEntry("foo", r, pval);
-                up.setMapEntry("_commitRoot", r, "1");
-                up.increment("c", 1);
-                up.max("max", System.currentTimeMillis());
-                expectedRevs.add(r);
-            } else {
-                up.set("foo", pval);
-            }
-            NodeDocument old = super.ds.createOrUpdate(Collection.NODES, up);
-            if (cnt == 0) {
-                assertNull("expect null on create", old);
-            } else {
-                assertNotNull("fail on update " + cnt, old);
-            }
-            cnt += 1;
-        }
-
-        if (growing) {
-            NodeDocument result = super.ds.find(Collection.NODES, id, 0);
-            Map<Revision, Object> m = (Map<Revision, Object>)result.get("foo");
-            assertEquals("number of revisions", expectedRevs.size(), m.size());
-            assertTrue(m.keySet().equals(expectedRevs));
-        }
-
-        LOG.info("document updates with property of size " + size + (growing ? " (growing)" : "") + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
-    }
-
-    private static String generateString(int length, boolean ascii) {
-        char[] s = new char[length];
-        for (int i = 0; i < length; i++) {
-            if (ascii) {
-                s[i] = (char) (32 + (int) (95 * Math.random()));
-            } else {
-                s[i] = (char) (32 + (int) ((0xd7ff - 32) * Math.random()));
-            }
-        }
-        return new String(s);
-    }
-
     private static String generateId(int length, boolean ascii) {
         StringBuffer sb = new StringBuffer();
         for (int i = 0; i < length; i++) {
@@ -729,243 +454,6 @@ public class BasicDocumentStoreTest exte
         return sb.toString();
     }
 
-    @Test
-    public void testPerfUpdateLimit() throws SQLException, UnsupportedEncodingException {
-        internalTestPerfUpdateLimit("testPerfUpdateLimit", "raw row update (set long)", 0);
-    }
-
-    @Test
-    public void testPerfUpdateLimitString() throws SQLException, UnsupportedEncodingException {
-        internalTestPerfUpdateLimit("testPerfUpdateLimitString", "raw row update (set long/string)", 1);
-    }
-
-    @Test
-    public void testPerfUpdateLimitStringBlob() throws SQLException, UnsupportedEncodingException {
-        internalTestPerfUpdateLimit("testPerfUpdateLimitStringBlob", "raw row update (set long/string/blob)", 2);
-    }
-
-    @Test
-    public void testPerfUpdateAppendString() throws SQLException, UnsupportedEncodingException {
-        internalTestPerfUpdateLimit("testPerfUpdateAppendString", "raw row update (append string)", 3);
-    }
-
-    @Test
-    public void testPerfUpdateGrowingDoc() throws SQLException, UnsupportedEncodingException {
-        internalTestPerfUpdateLimit("testPerfUpdateGrowingDoc", "raw row update (string + blob)", 4);
-    }
-
-    private void internalTestPerfUpdateLimit(String name, String desc, int mode) throws SQLException, UnsupportedEncodingException {
-        if (super.rdbDataSource != null) {
-            String key = name;
-            Connection connection = null;
-            String table = DocumentStoreFixture.TABLEPREFIX + "NODES";
-
-            // create test node
-            try {
-                connection = super.rdbDataSource.getConnection();
-                connection.setAutoCommit(false);
-                // we use the same pool as the document store, and the connection might have been returned in read-only mode
-                connection.setReadOnly(false);
-                PreparedStatement stmt = connection.prepareStatement("insert into " + table
-                        + " (ID, MODCOUNT, DATA) values (?, ?, ?)");
-                try {
-                    setIdInStatement(stmt, 1, key);
-                    stmt.setLong(2, 0);
-                    stmt.setString(3, "X");
-                    stmt.executeUpdate();
-                    connection.commit();
-                } finally {
-                    stmt.close();
-                }
-            } catch (SQLException ex) {
-                // ignored
-                // ex.printStackTrace();
-            } finally {
-                if (connection != null) {
-                    try {
-                        connection.close();
-                    } catch (SQLException e) {
-                        // ignored
-                    }
-                }
-            }
-
-            removeMe.add(key);
-            StringBuffer expect = new StringBuffer("X");
-
-            String appendString = generateString(512, true);
-
-            long duration = 1000;
-            long end = System.currentTimeMillis() + duration;
-            long cnt = 0;
-            byte bdata[] = new byte[65536];
-            String sdata = appendString;
-            boolean needsConcat = super.dsname.contains("MySQL");
-            boolean needsSQLStringConcat = super.dsname.contains("MSSql");
-            int dataInChars = ((super.dsname.contains("Oracle") || (super.dsname.contains("MSSql"))) ? 4000 : 16384);
-            int dataInBytes = dataInChars / 3;
-
-            while (System.currentTimeMillis() < end) {
-
-                try {
-                    connection = super.rdbDataSource.getConnection();
-                    connection.setAutoCommit(false);
-
-                    if (mode == 0) {
-                        PreparedStatement stmt = connection.prepareStatement("update " + table + " set MODCOUNT = ? where ID = ?");
-                        try {
-                            stmt.setLong(1, cnt);
-                            setIdInStatement(stmt, 2, key);
-                            assertEquals(1, stmt.executeUpdate());
-                            connection.commit();
-                        } finally {
-                            stmt.close();
-                        }
-                    } else if (mode == 1) {
-                        PreparedStatement stmt = connection.prepareStatement("update " + table
-                                + " set MODCOUNT = ?, DATA = ? where ID = ?");
-                        try {
-                            stmt.setLong(1, cnt);
-                            stmt.setString(2, "JSON data " + UUID.randomUUID());
-                            setIdInStatement(stmt, 3, key);
-                            assertEquals(1, stmt.executeUpdate());
-                            connection.commit();
-                        } finally {
-                            stmt.close();
-                        }
-                    } else if (mode == 2) {
-                        PreparedStatement stmt = connection.prepareStatement("update " + table
-                                + " set MODCOUNT = ?, DATA = ?, BDATA = ? where ID = ?");
-                        try {
-                            stmt.setLong(1, cnt);
-                            stmt.setString(2, "JSON data " + UUID.randomUUID());
-                            bdata[(int) cnt % bdata.length] = (byte) (cnt & 0xff);
-                            stmt.setString(2, "JSON data " + UUID.randomUUID());
-                            stmt.setBytes(3, bdata);
-                            setIdInStatement(stmt, 4, key);
-                            assertEquals(1, stmt.executeUpdate());
-                            connection.commit();
-                        } finally {
-                            stmt.close();
-                        }
-                    } else if (mode == 3) {
-                        String t = "update " + table + " ";
-
-                        t += "set DATA = ";
-                        if (needsConcat) {
-                            t += "CONCAT(DATA, ?) ";
-                        } else if (needsSQLStringConcat) {
-                            t += "CASE WHEN LEN(DATA) <= " + (dataInChars - appendString.length()) + " THEN (DATA + CAST(? AS nvarchar(" + 4000
-                                    + "))) ELSE (DATA + CAST(DATA AS nvarchar(max))) END";
-                        } else {
-                            t += "DATA || CAST(? as varchar(" + dataInChars + "))";
-                        }
-
-                        t += " where ID = ?";
-
-                        PreparedStatement stmt = connection.prepareStatement(t);
-                        try {
-                            stmt.setString(1, appendString);
-                            setIdInStatement(stmt, 2, key);
-                            assertEquals(1, stmt.executeUpdate());
-                            connection.commit();
-                            expect.append(appendString);
-                        } catch (SQLException ex) {
-                            // ex.printStackTrace();
-                            String state = ex.getSQLState();
-                            if ("22001".equals(state) /* everybody */ || ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */) {
-                                // overflow
-                                connection.rollback();
-                                stmt = connection.prepareStatement("update " + table
-                                        + " set MODCOUNT = MODCOUNT + 1, DATA = ? where ID = ?");
-                                stmt.setString(1, "X");
-                                setIdInStatement(stmt, 2, key);
-                                assertEquals(1, stmt.executeUpdate());
-                                connection.commit();
-                                expect = new StringBuffer("X");
-                            } else {
-                                // ex.printStackTrace();
-                                throw (ex);
-                            }
-                        } finally {
-                            stmt.close();
-                        }
-                    } else if (mode == 4) {
-                        PreparedStatement stmt = connection.prepareStatement("update " + table
-                                + " set MODIFIED = ?, HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?");
-                        try {
-                            int si = 1;
-                            stmt.setObject(si++, System.currentTimeMillis() / 5, Types.BIGINT);
-                            stmt.setObject(si++, 0, Types.SMALLINT);
-                            stmt.setObject(si++, cnt, Types.BIGINT);
-                            stmt.setObject(si++, null, Types.BIGINT);
-                            stmt.setObject(si++, sdata.length(), Types.BIGINT);
-
-                            if (sdata.length() < dataInBytes) {
-                                stmt.setString(si++, sdata);
-                                stmt.setBinaryStream(si++, null, 0);
-                            }
-                            else {
-                                stmt.setString(si++, "null");
-                                stmt.setBytes(si++, sdata.getBytes("UTF-8"));
-                            }
-                            setIdInStatement(stmt, si++, key);
-                            assertEquals(1, stmt.executeUpdate());
-                            connection.commit();
-                            sdata += appendString;
-                        } finally {
-                            stmt.close();
-                        }
-
-                    }
-                } catch (SQLException ex) {
-                    LOG.error(ex.getMessage() + " " + ex.getSQLState() + " " + ex.getErrorCode(), ex);
-                } finally {
-                    if (connection != null) {
-                        try {
-                            connection.close();
-                        } catch (SQLException e) {
-                            // ignored
-                        }
-                    }
-                }
-
-                cnt += 1;
-            }
-
-            // check persisted values
-            if (mode == 3) {
-                try {
-                    connection = super.rdbDataSource.getConnection();
-                    connection.setAutoCommit(false);
-                    PreparedStatement stmt = connection.prepareStatement("select DATA, MODCOUNT from " + table + " where ID = ?");
-                    try {
-                        setIdInStatement(stmt, 1, key);
-                        ResultSet rs = stmt.executeQuery();
-                        assertTrue("test record " + key + " not found in " + super.dsname, rs.next());
-                        String got = rs.getString(1);
-                        long modc = rs.getLong(2);
-                        LOG.info("column reset " + modc + " times");
-                        assertEquals(expect.toString(), got);
-                    } finally {
-                        stmt.close();
-                    }
-                } finally {
-                    if (connection != null) {
-                        try {
-                            connection.close();
-                        } catch (SQLException e) {
-                            // ignored
-                        }
-                    }
-                }
-            }
-
-            LOG.info(desc + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f))
-                    + "/s)");
-        }
-    }
-
     // make sure _collisionsModCount property is maintained properly when it exists
     @Test
     public void testCollisionsModCount() {
@@ -1022,20 +510,6 @@ public class BasicDocumentStoreTest exte
         }
     }
 
-    private void setIdInStatement(PreparedStatement stmt, int idx, String id) throws SQLException {
-        boolean binaryId = super.dsname.contains("MySQL") || super.dsname.contains("MSSql");
-        if (binaryId) {
-            try {
-                stmt.setBytes(idx, id.getBytes("UTF-8"));
-            } catch (UnsupportedEncodingException ex) {
-                LOG.error("UTF-8 not supported??", ex);
-                throw new DocumentStoreException(ex);
-            }
-        } else {
-            stmt.setString(idx, id);
-        }
-    }
-
     @Test
     public void description() throws Exception{
         Map<String, String> desc = ds.getMetadata();

Added: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java?rev=1685552&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java Mon Jun 15 11:25:10 2015
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests measuring the performance of various {@link DocumentStore} operations.
+ */
+public class DocumentStorePerformanceTest extends AbstractDocumentStoreTest {
+
+    private static final Logger LOG = LoggerFactory.getLogger(DocumentStorePerformanceTest.class);
+
+    public DocumentStorePerformanceTest(DocumentStoreFixture dsf) {
+        super(dsf);
+    }
+
+    @Test
+    public void testCreatePerfSmall() {
+        createPerf(16, 1);
+    }
+
+    @Test
+    public void testCreatePerfSmallBatch() {
+        createPerf(16, 64);
+    }
+
+    @Test
+    public void testCreatePerfBig() {
+        createPerf(32 * 1024, 1);
+    }
+
+    private void createPerf(int size, int amount) {
+        String pval = generateString(size, true);
+        long duration = 1000;
+        long end = System.currentTimeMillis() + duration;
+        long cnt = 0;
+        List<String> ids = new ArrayList<String>();
+
+        while (System.currentTimeMillis() < end) {
+            List<UpdateOp> ups = new ArrayList<UpdateOp>();
+            for (int i = 0; i < amount; i++) {
+                String id = this.getClass().getName() + ".testCreatePerf-" + size + "-" + cnt + "-" + i;
+                UpdateOp up = new UpdateOp(id, true);
+                up.set("_id", id);
+                up.set("foo", pval);
+                ups.add(up);
+                ids.add(id);
+            }
+            boolean success = super.ds.create(Collection.NODES, ups);
+            removeMe.addAll(ids);
+            assertTrue("documents with " + ids + " not created", success);
+            cnt += 1;
+        }
+
+        LOG.info("document creation with property of size " + size + " and batch size " + amount + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfCollectionPaging() {
+        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPaging", false);
+    }
+
+    @Test
+    public void testPerfCollectionPagingUnCached() {
+        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPagingUnCached", true);
+    }
+
+    private void testPerfCollectionPaging(String name, boolean invalidateCache) {
+        String cid = name;
+        int nodecount = 20000;
+        int initialFetchCount = 100;
+        int maxFetchCount = 1600;
+        int fetchcount = initialFetchCount;
+        long duration = 2000;
+        int cnt = 0;
+        List<UpdateOp> ups = new ArrayList<UpdateOp>();
+
+        UpdateOp container = new UpdateOp(cid, true);
+        container.set("_id", cid);
+        ups.add(container);
+        removeMe.add(cid);
+        for (int i = 0; i < nodecount; i++) {
+            String id = String.format("%s/%08d", cid, i);
+            removeMe.add(id);
+            UpdateOp u = new UpdateOp(id, true);
+            u.set("_id", id);
+            ups.add(u);
+        }
+
+        boolean success = super.ds.create(Collection.NODES, ups);
+        assertTrue(success);
+        super.ds.invalidateCache();
+
+        long end = System.currentTimeMillis() + duration;
+        String sid = cid;
+        int found = 0;
+        while (System.currentTimeMillis() < end) {
+            long now = System.currentTimeMillis();
+            List<NodeDocument> result = super.ds.query(Collection.NODES, sid, cid + "X", fetchcount);
+            if (super.ds.getCacheStats() != null && result.size() > 0) {
+                // check freshness of returned documents
+                long created = result.get(0).getLastCheckTime();
+                assertTrue(
+                        "'getLastCheckTime' timestamp of NodeDocument too old (" + created + " vs " + now + ") (on " + super.dsname + ")",
+                        created >= now);
+            }
+            found += result.size();
+            if (result.size() < fetchcount) {
+                if (sid.equals(cid)) {
+                    fail("first page must not be empty");
+                }
+                sid = cid;
+                assertEquals(nodecount, found);
+                found = 0;
+                fetchcount = initialFetchCount;
+            }
+            else {
+                sid = result.get(result.size() -1).getId();
+                if (fetchcount < maxFetchCount) {
+                    fetchcount *= 2;
+                }
+            }
+            cnt += 1;
+            if (invalidateCache) {
+                super.ds.invalidateCache();
+            }
+        }
+
+        LOG.info("collection lookups " + (invalidateCache ? "(uncached) " : "") + super.dsname + " was " + cnt + " in " + duration
+                + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfLastRevBatch() {
+        String bid = this.getClass().getName() + ".testPerfLastRevBatch";
+        int nodecount = 100;
+        long duration = 5000;
+        int cnt = 0;
+        List<String> ids = new ArrayList<String>();
+        Revision cr = Revision.fromString("r0-0-1");
+
+        // create test nodes
+        for (int i = 0; i < nodecount; i++) {
+            String id = bid + "-" + i;
+            super.ds.remove(Collection.NODES, id);
+            removeMe.add(id);
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.set("testprop", generateString(100 * i, true));
+            up.setMapEntry("_lastRev", cr, "setup");
+            up.set("_modified", NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
+            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+            assertTrue("creation failed for " + id + " in " + super.dsname, success);
+            ids.add(id);
+        }
+
+        long end = System.currentTimeMillis() + duration;
+        while (System.currentTimeMillis() < end) {
+            UpdateOp up = new UpdateOp(bid, true);
+            up.setMapEntry("_lastRev", cr, "iteration-" + cnt);
+            up.max("_modified", NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
+            super.ds.update(Collection.NODES, ids, up);
+            cnt += 1;
+        }
+
+        // check postcondition
+        super.ds.invalidateCache();
+        for (int i = 0; i < nodecount; i++) {
+            NodeDocument d = super.ds.find(Collection.NODES, bid + "-" + i);
+            assertNotNull(d);
+            Map<Revision, String> m = (Map<Revision, String>)d.get("_lastRev");
+            assertEquals("iteration-" + (cnt - 1), m.get(cr));
+        }
+
+        LOG.info("batch update for _lastRev for " + super.dsname + " was "
+                + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfReadBigDocCached() {
+        perfReadBigDoc(true, this.getClass().getName() + ".testReadBigDocCached");
+    }
+
+    @Test
+    public void testPerfReadBigDocAfterInvalidate() {
+        perfReadBigDoc(false, this.getClass().getName() + ".testReadBigDocAfterInvalidate");
+    }
+
+    private void perfReadBigDoc(boolean cached, String name) {
+        String id = name;
+        long duration = 1000;
+        int cnt = 0;
+
+        super.ds.remove(Collection.NODES, Collections.singletonList(id));
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        for (int i = 0; i < 100; i++) {
+            up.set("foo" + i, generateString(1024, true));
+        }
+        assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
+        removeMe.add(id);
+
+        long end = System.currentTimeMillis() + duration;
+        while (System.currentTimeMillis() < end) {
+            if (!cached) {
+                super.ds.invalidateCache(Collection.NODES, id);
+            }
+            NodeDocument d = super.ds.find(Collection.NODES, id, 10);
+            assertNotNull(d);
+            cnt += 1;
+        }
+
+        LOG.info("big doc read " + (cached ? "" : "(after invalidate) ") + "from " + super.dsname + " was " + cnt + " in "
+                + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+
+    @Test
+    public void testUpdatePerfSmall() {
+        updatePerf(16, false);
+    }
+
+    @Test
+    public void testUpdatePerfSmallGrowing() {
+        updatePerf(16, true);
+    }
+
+    @Test
+    public void testUpdatePerfBig() {
+        updatePerf(32 * 1024, false);
+    }
+
+    private void updatePerf(int size, boolean growing) {
+        String pval = generateString(size, true);
+        long duration = 1000;
+        long end = System.currentTimeMillis() + duration;
+        long cnt = 0;
+        Set<Revision> expectedRevs = new HashSet<Revision>();
+
+        String id = this.getClass().getName() + ".testUpdatePerf" + (growing ? "Growing" : "") + "-" + size;
+        removeMe.add(id);
+
+        while (System.currentTimeMillis() < end) {
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            if (growing) {
+                Revision r = new Revision(System.currentTimeMillis(), (int) cnt, 1);
+                up.setMapEntry("foo", r, pval);
+                up.setMapEntry("_commitRoot", r, "1");
+                up.increment("c", 1);
+                up.max("max", System.currentTimeMillis());
+                expectedRevs.add(r);
+            } else {
+                up.set("foo", pval);
+            }
+            NodeDocument old = super.ds.createOrUpdate(Collection.NODES, up);
+            if (cnt == 0) {
+                assertNull("expect null on create", old);
+            } else {
+                assertNotNull("fail on update " + cnt, old);
+            }
+            cnt += 1;
+        }
+
+        if (growing) {
+            NodeDocument result = super.ds.find(Collection.NODES, id, 0);
+            Map<Revision, Object> m = (Map<Revision, Object>)result.get("foo");
+            assertEquals("number of revisions", expectedRevs.size(), m.size());
+            assertTrue(m.keySet().equals(expectedRevs));
+        }
+
+        LOG.info("document updates with property of size " + size + (growing ? " (growing)" : "") + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStorePerformanceTest.java
------------------------------------------------------------------------------
    svn:executable = *

Added: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java?rev=1685552&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java Mon Jun 15 11:25:10 2015
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.rdb;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.UnsupportedEncodingException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.UUID;
+
+import org.apache.jackrabbit.oak.plugins.document.AbstractDocumentStoreTest;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStoreFixture;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStorePerformanceTest;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests measuring the performance of various {@link RDBDocumentStore}
+ * operations.
+ */
+public class RDBDocumentStorePerformanceTest extends AbstractDocumentStoreTest {
+
+    private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentStorePerformanceTest.class);
+
+    public RDBDocumentStorePerformanceTest(DocumentStoreFixture dsf) {
+        super(dsf);
+    }
+
+    @Test
+    public void testPerfUpdateLimit() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimit", "raw row update (set long)", 0);
+    }
+
+    @Test
+    public void testPerfUpdateLimitString() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimitString", "raw row update (set long/string)", 1);
+    }
+
+    @Test
+    public void testPerfUpdateLimitStringBlob() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimitStringBlob", "raw row update (set long/string/blob)", 2);
+    }
+
+    @Test
+    public void testPerfUpdateAppendString() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateAppendString", "raw row update (append string)", 3);
+    }
+
+    @Test
+    public void testPerfUpdateGrowingDoc() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateGrowingDoc", "raw row update (string + blob)", 4);
+    }
+
+    private void internalTestPerfUpdateLimit(String name, String desc, int mode) throws SQLException, UnsupportedEncodingException {
+        if (super.rdbDataSource != null) {
+            String key = name;
+            Connection connection = null;
+            String table = DocumentStoreFixture.TABLEPREFIX + "NODES";
+
+            // create test node
+            try {
+                connection = super.rdbDataSource.getConnection();
+                connection.setAutoCommit(false);
+                // we use the same pool as the document store, and the
+                // connection might have been returned in read-only mode
+                connection.setReadOnly(false);
+                PreparedStatement stmt = connection.prepareStatement("insert into " + table
+                        + " (ID, MODCOUNT, DATA) values (?, ?, ?)");
+                try {
+                    setIdInStatement(stmt, 1, key);
+                    stmt.setLong(2, 0);
+                    stmt.setString(3, "X");
+                    stmt.executeUpdate();
+                    connection.commit();
+                } finally {
+                    stmt.close();
+                }
+            } catch (SQLException ex) {
+                // ignored
+                // ex.printStackTrace();
+            } finally {
+                if (connection != null) {
+                    try {
+                        connection.close();
+                    } catch (SQLException e) {
+                        // ignored
+                    }
+                }
+            }
+
+            removeMe.add(key);
+            StringBuffer expect = new StringBuffer("X");
+
+            String appendString = generateString(512, true);
+
+            long duration = 1000;
+            long end = System.currentTimeMillis() + duration;
+            long cnt = 0;
+            byte bdata[] = new byte[65536];
+            String sdata = appendString;
+            boolean needsConcat = super.dsname.contains("MySQL");
+            boolean needsSQLStringConcat = super.dsname.contains("MSSql");
+            int dataInChars = ((super.dsname.contains("Oracle") || (super.dsname.contains("MSSql"))) ? 4000 : 16384);
+            int dataInBytes = dataInChars / 3;
+
+            while (System.currentTimeMillis() < end) {
+
+                try {
+                    connection = super.rdbDataSource.getConnection();
+                    connection.setAutoCommit(false);
+
+                    if (mode == 0) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table + " set MODCOUNT = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            setIdInStatement(stmt, 2, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 1) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table
+                                + " set MODCOUNT = ?, DATA = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            setIdInStatement(stmt, 3, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 2) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table
+                                + " set MODCOUNT = ?, DATA = ?, BDATA = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            bdata[(int) cnt % bdata.length] = (byte) (cnt & 0xff);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            stmt.setBytes(3, bdata);
+                            setIdInStatement(stmt, 4, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 3) {
+                        String t = "update " + table + " ";
+
+                        t += "set DATA = ";
+                        if (needsConcat) {
+                            t += "CONCAT(DATA, ?) ";
+                        } else if (needsSQLStringConcat) {
+                            t += "CASE WHEN LEN(DATA) <= " + (dataInChars - appendString.length())
+                                    + " THEN (DATA + CAST(? AS nvarchar(" + 4000
+                                    + "))) ELSE (DATA + CAST(DATA AS nvarchar(max))) END";
+                        } else {
+                            t += "DATA || CAST(? as varchar(" + dataInChars + "))";
+                        }
+
+                        t += " where ID = ?";
+
+                        PreparedStatement stmt = connection.prepareStatement(t);
+                        try {
+                            stmt.setString(1, appendString);
+                            setIdInStatement(stmt, 2, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                            expect.append(appendString);
+                        } catch (SQLException ex) {
+                            // ex.printStackTrace();
+                            String state = ex.getSQLState();
+                            if ("22001".equals(state) /* everybody */|| ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */) {
+                                // overflow
+                                connection.rollback();
+                                stmt = connection.prepareStatement("update " + table
+                                        + " set MODCOUNT = MODCOUNT + 1, DATA = ? where ID = ?");
+                                stmt.setString(1, "X");
+                                setIdInStatement(stmt, 2, key);
+                                assertEquals(1, stmt.executeUpdate());
+                                connection.commit();
+                                expect = new StringBuffer("X");
+                            } else {
+                                // ex.printStackTrace();
+                                throw (ex);
+                            }
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 4) {
+                        PreparedStatement stmt = connection
+                                .prepareStatement("update "
+                                        + table
+                                        + " set MODIFIED = ?, HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?");
+                        try {
+                            int si = 1;
+                            stmt.setObject(si++, System.currentTimeMillis() / 5, Types.BIGINT);
+                            stmt.setObject(si++, 0, Types.SMALLINT);
+                            stmt.setObject(si++, cnt, Types.BIGINT);
+                            stmt.setObject(si++, null, Types.BIGINT);
+                            stmt.setObject(si++, sdata.length(), Types.BIGINT);
+
+                            if (sdata.length() < dataInBytes) {
+                                stmt.setString(si++, sdata);
+                                stmt.setBinaryStream(si++, null, 0);
+                            } else {
+                                stmt.setString(si++, "null");
+                                stmt.setBytes(si++, sdata.getBytes("UTF-8"));
+                            }
+                            setIdInStatement(stmt, si++, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                            sdata += appendString;
+                        } finally {
+                            stmt.close();
+                        }
+
+                    }
+                } catch (SQLException ex) {
+                    LOG.error(ex.getMessage() + " " + ex.getSQLState() + " " + ex.getErrorCode(), ex);
+                } finally {
+                    if (connection != null) {
+                        try {
+                            connection.close();
+                        } catch (SQLException e) {
+                            // ignored
+                        }
+                    }
+                }
+
+                cnt += 1;
+            }
+
+            // check persisted values
+            if (mode == 3) {
+                try {
+                    connection = super.rdbDataSource.getConnection();
+                    connection.setAutoCommit(false);
+                    PreparedStatement stmt = connection.prepareStatement("select DATA, MODCOUNT from " + table + " where ID = ?");
+                    try {
+                        setIdInStatement(stmt, 1, key);
+                        ResultSet rs = stmt.executeQuery();
+                        assertTrue("test record " + key + " not found in " + super.dsname, rs.next());
+                        String got = rs.getString(1);
+                        long modc = rs.getLong(2);
+                        LOG.info("column reset " + modc + " times");
+                        assertEquals(expect.toString(), got);
+                    } finally {
+                        stmt.close();
+                    }
+                } finally {
+                    if (connection != null) {
+                        try {
+                            connection.close();
+                        } catch (SQLException e) {
+                            // ignored
+                        }
+                    }
+                }
+            }
+
+            LOG.info(desc + " for " + super.dsname + " was " + cnt + " in " + duration + "ms (" + (cnt / (duration / 1000f))
+                    + "/s)");
+        }
+    }
+
+    private void setIdInStatement(PreparedStatement stmt, int idx, String id) throws SQLException {
+        boolean binaryId = super.dsname.contains("MySQL") || super.dsname.contains("MSSql");
+        if (binaryId) {
+            try {
+                stmt.setBytes(idx, id.getBytes("UTF-8"));
+            } catch (UnsupportedEncodingException ex) {
+                LOG.error("UTF-8 not supported??", ex);
+                throw new DocumentStoreException(ex);
+            }
+        } else {
+            stmt.setString(idx, id);
+        }
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStorePerformanceTest.java
------------------------------------------------------------------------------
    svn:executable = *