You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@geode.apache.org by ds...@apache.org on 2019/01/16 20:28:22 UTC
[geode] branch develop updated: GEODE-6272: change jdbc to use pdx
as keys instead of json (#3076)
This is an automated email from the ASF dual-hosted git repository.
dschneider pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/geode.git
The following commit(s) were added to refs/heads/develop by this push:
new 56fda39 GEODE-6272: change jdbc to use pdx as keys instead of json (#3076)
56fda39 is described below
commit 56fda39a8ab41fc9ffeddb3da13322eaa1516084
Author: Darrel Schneider <ds...@pivotal.io>
AuthorDate: Wed Jan 16 12:28:08 2019 -0800
GEODE-6272: change jdbc to use pdx as keys instead of json (#3076)
Composite keys for the jdbc connector must now be PdxInstances created with "neverDeserialize".
* added PdxInstanceFactory.neverDeserialize and PdxInstance.isDeserializable as new external APIs.
* improved the InternalPdxInstance interface to include Sendable.
Also this interface is now implemented by the pdx enum classes.
So now all classes that implement PdxInstance also implement InternalPdxInstance
* getCacheObject is now on the InternalPdxInstance interface.
* getRawField is now on the InternalPdxInstance interface.
A method that was on PdxInstanceImpl, but was only used by AttributeDescriptor
and did not use any PdxInstanceImpl data, is now on AttributeDescriptor.
This also eliminated a singleton lookup of the cache.
* removed unused FieldNotFoundInPdxVersion
* removed PdxFieldNotFoundException
---
.../jdbc/JdbcAsyncWriterIntegrationTest.java | 54 +++++----
.../geode/connectors/jdbc/JdbcDistributedTest.java | 9 +-
.../connectors/jdbc/JdbcLoaderIntegrationTest.java | 17 ++-
.../connectors/jdbc/JdbcWriterIntegrationTest.java | 52 +++++----
.../jdbc/PostgresJdbcLoaderIntegrationTest.java | 10 +-
.../geode/connectors/jdbc/internal/SqlHandler.java | 24 ++--
.../connectors/jdbc/internal/SqlHandlerTest.java | 99 +++++++++-------
.../geode/pdx/PdxInstanceFactoryJUnitTest.java | 124 +++++++++++++++++++++
.../cache/query/internal/AttributeDescriptor.java | 105 ++++++++---------
.../cache/query/internal/CompiledOperation.java | 10 +-
.../geode/cache/query/internal/PathUtils.java | 6 +-
.../cache/query/internal/RuntimeIterator.java | 2 +-
.../java/org/apache/geode/pdx/PdxInstance.java | 17 ++-
.../org/apache/geode/pdx/PdxInstanceFactory.java | 9 ++
.../org/apache/geode/pdx/internal/EnumInfo.java | 3 +-
.../pdx/internal/FieldNotFoundInPdxVersion.java | 25 -----
.../geode/pdx/internal/InternalPdxInstance.java | 27 ++++-
.../apache/geode/pdx/internal/PdxInstanceEnum.java | 4 +-
.../geode/pdx/internal/PdxInstanceFactoryImpl.java | 9 +-
.../apache/geode/pdx/internal/PdxInstanceImpl.java | 20 +---
.../org/apache/geode/pdx/internal/PdxType.java | 4 +
.../sanctioned-geode-core-serializables.txt | 1 -
22 files changed, 385 insertions(+), 246 deletions(-)
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
index 1557890..3b51e2b 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
@@ -24,7 +24,6 @@ import java.sql.SQLException;
import java.sql.Statement;
import org.awaitility.core.ThrowingRunnable;
-import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -46,7 +45,7 @@ public abstract class JdbcAsyncWriterIntegrationTest {
private static final String REGION_TABLE_NAME = "employees";
private InternalCache cache;
- private Region<String, PdxInstance> employees;
+ private Region<Object, PdxInstance> employees;
private Connection connection;
private Statement statement;
private JdbcAsyncWriter jdbcWriter;
@@ -173,17 +172,17 @@ public abstract class JdbcAsyncWriterIntegrationTest {
@Test
public void canDestroyFromTableWithCompositeKey() throws Exception {
setupRegion("id,age");
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", pdxEmployee1.getField("id"));
- compositeKey1.put("age", pdxEmployee1.getField("age"));
- JSONObject compositeKey2 = new JSONObject();
- compositeKey2.put("id", pdxEmployee2.getField("id"));
- compositeKey2.put("age", pdxEmployee2.getField("age"));
- employees.put(compositeKey1.toString(), pdxEmployee1);
- employees.put(compositeKey2.toString(), pdxEmployee2);
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdxEmployee1.getField("id"), String.class)
+ .writeField("age", (Integer) pdxEmployee1.getField("age"), int.class).create();
+ PdxInstance compositeKey2 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdxEmployee2.getField("id"), String.class)
+ .writeField("age", (Integer) pdxEmployee2.getField("age"), int.class).create();
+ employees.put(compositeKey1, pdxEmployee1);
+ employees.put(compositeKey2, pdxEmployee2);
awaitUntil(() -> assertThat(jdbcWriter.getSuccessfulEvents()).isEqualTo(2));
- employees.destroy(compositeKey1.toString());
+ employees.destroy(compositeKey1);
awaitUntil(() -> assertThat(jdbcWriter.getSuccessfulEvents()).isEqualTo(3));
ResultSet resultSet =
@@ -209,16 +208,15 @@ public abstract class JdbcAsyncWriterIntegrationTest {
@Test
public void canInsertIntoTableWithCompositeKey() throws Exception {
setupRegion("id,age");
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", pdxEmployee1.getField("id"));
- compositeKey1.put("age", pdxEmployee1.getField("age"));
- String actualKey = compositeKey1.toString();
- JSONObject compositeKey2 = new JSONObject();
- compositeKey2.put("id", pdxEmployee2.getField("id"));
- compositeKey2.put("age", pdxEmployee2.getField("age"));
-
- employees.put(actualKey, pdxEmployee1);
- employees.put(compositeKey2.toString(), pdxEmployee2);
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdxEmployee1.getField("id"), String.class)
+ .writeField("age", (Integer) pdxEmployee1.getField("age"), int.class).create();
+ PdxInstance compositeKey2 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdxEmployee2.getField("id"), String.class)
+ .writeField("age", (Integer) pdxEmployee2.getField("age"), int.class).create();
+
+ employees.put(compositeKey1, pdxEmployee1);
+ employees.put(compositeKey2, pdxEmployee2);
awaitUntil(() -> assertThat(jdbcWriter.getSuccessfulEvents()).isEqualTo(2));
ResultSet resultSet =
@@ -251,16 +249,16 @@ public abstract class JdbcAsyncWriterIntegrationTest {
PdxInstance myPdx = cache.createPdxInstanceFactory(Employee.class.getName())
.writeString("id", "1").writeString("name", "Emp1")
.writeInt("age", 55).create();
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", myPdx.getField("id"));
- compositeKey1.put("age", myPdx.getField("age"));
- employees.put(compositeKey1.toString(), myPdx);
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) myPdx.getField("id"), String.class)
+ .writeField("age", (Integer) myPdx.getField("age"), int.class).create();
+ employees.put(compositeKey1, myPdx);
awaitUntil(() -> assertThat(jdbcWriter.getSuccessfulEvents()).isEqualTo(1));
WritablePdxInstance updatedPdx = myPdx.createWriter();
updatedPdx.setField("name", "updated");
Employee updatedEmployee = (Employee) updatedPdx.getObject();
- employees.put(compositeKey1.toString(), updatedPdx);
+ employees.put(compositeKey1, updatedPdx);
awaitUntil(() -> assertThat(jdbcWriter.getSuccessfulEvents()).isEqualTo(2));
ResultSet resultSet =
@@ -317,13 +315,13 @@ public abstract class JdbcAsyncWriterIntegrationTest {
assertThat(resultSet.getObject("age")).isEqualTo(employee.getAge());
}
- private Region<String, PdxInstance> createRegionWithJDBCAsyncWriter(String regionName, String ids)
+ private Region<Object, PdxInstance> createRegionWithJDBCAsyncWriter(String regionName, String ids)
throws RegionMappingExistsException {
jdbcWriter = new JdbcAsyncWriter(createSqlHandler(ids), cache);
cache.createAsyncEventQueueFactory().setBatchSize(1).setBatchTimeInterval(1)
.create("jdbcAsyncQueue", jdbcWriter);
- RegionFactory<String, PdxInstance> regionFactory = cache.createRegionFactory(REPLICATE);
+ RegionFactory<Object, PdxInstance> regionFactory = cache.createRegionFactory(REPLICATE);
regionFactory.addAsyncEventQueueId("jdbcAsyncQueue");
return regionFactory.create(regionName);
}
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcDistributedTest.java b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcDistributedTest.java
index 06e1cd3..66c6fa3 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcDistributedTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcDistributedTest.java
@@ -29,7 +29,6 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.Date;
-import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -456,10 +455,10 @@ public abstract class JdbcDistributedTest implements Serializable {
PdxInstance pdxEmployee1 =
ClusterStartupRule.getCache().createPdxInstanceFactory(Employee.class.getName())
.writeString("id", "id1").writeString("name", "Emp1").writeInt("age", 55).create();
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", pdxEmployee1.getField("id"));
- compositeKey1.put("age", pdxEmployee1.getField("age"));
- String key = compositeKey1.toString();
+ PdxInstance key =
+ ClusterStartupRule.getCache().createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdxEmployee1.getField("id"), String.class)
+ .writeField("age", (Integer) pdxEmployee1.getField("age"), int.class).create();
Region<Object, Object> region = ClusterStartupRule.getCache().getRegion(REGION_NAME);
region.put(key, pdxEmployee1);
region.invalidate(key);
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
index 33d2243..b9295e8 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
@@ -24,7 +24,6 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.Date;
-import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -136,10 +135,10 @@ public abstract class JdbcLoaderIntegrationTest {
createRegionWithJDBCLoader(REGION_TABLE_NAME, Employee.class.getName(), ids, null, null);
createPdxType();
- JSONObject key = new JSONObject();
- key.put("id", "1");
- key.put("name", "Emp1");
- Employee value = region.get(key.toString());
+ PdxInstance key =
+ cache.createPdxInstanceFactory("MyPdxKeyType").neverDeserialize().writeString("id", "1")
+ .writeString("name", "Emp1").create();
+ Employee value = region.get(key);
assertThat(value.getId()).isEqualTo("1");
assertThat(value.getName()).isEqualTo("Emp1");
@@ -167,10 +166,10 @@ public abstract class JdbcLoaderIntegrationTest {
schema);
createPdxType();
- JSONObject key = new JSONObject();
- key.put("id", "1");
- key.put("name", "Emp1");
- Employee value = region.get(key.toString());
+ PdxInstance key =
+ cache.createPdxInstanceFactory("MyPdxKeyType").neverDeserialize().writeString("id", "1")
+ .writeString("name", "Emp1").create();
+ Employee value = region.get(key);
assertThat(value.getId()).isEqualTo("1");
assertThat(value.getName()).isEqualTo("Emp1");
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
index 7432534..445b002 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
@@ -24,7 +24,6 @@ import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
-import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -48,7 +47,7 @@ public abstract class JdbcWriterIntegrationTest {
protected static final String REGION_TABLE_NAME = "employees";
protected InternalCache cache;
- protected Region<String, PdxInstance> employees;
+ protected Region<Object, PdxInstance> employees;
protected Connection connection;
protected Statement statement;
protected JdbcWriter jdbcWriter;
@@ -184,16 +183,15 @@ public abstract class JdbcWriterIntegrationTest {
public void canInsertIntoTableWithCompositeKey() throws Exception {
createTable();
setupRegion("id,age");
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", pdx1.getField("id"));
- compositeKey1.put("age", pdx1.getField("age"));
- String actualKey = compositeKey1.toString();
- JSONObject compositeKey2 = new JSONObject();
- compositeKey2.put("id", pdx2.getField("id"));
- compositeKey2.put("age", pdx2.getField("age"));
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdx1.getField("id"), String.class)
+ .writeField("age", (Integer) pdx1.getField("age"), int.class).create();
+ PdxInstance compositeKey2 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdx2.getField("id"), String.class)
+ .writeField("age", (Integer) pdx2.getField("age"), int.class).create();
- employees.put(actualKey, pdx1);
- employees.put(compositeKey2.toString(), pdx2);
+ employees.put(compositeKey1, pdx1);
+ employees.put(compositeKey2, pdx2);
ResultSet resultSet =
statement.executeQuery("select * from " + REGION_TABLE_NAME + " order by id asc");
@@ -291,16 +289,16 @@ public abstract class JdbcWriterIntegrationTest {
public void canDestroyFromTableWithCompositeKey() throws Exception {
createTable();
setupRegion("id,age");
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", pdx1.getField("id"));
- compositeKey1.put("age", pdx1.getField("age"));
- JSONObject compositeKey2 = new JSONObject();
- compositeKey2.put("id", pdx2.getField("id"));
- compositeKey2.put("age", pdx2.getField("age"));
- employees.put(compositeKey1.toString(), pdx1);
- employees.put(compositeKey2.toString(), pdx2);
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdx1.getField("id"), String.class)
+ .writeField("age", (Integer) pdx1.getField("age"), int.class).create();
+ PdxInstance compositeKey2 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) pdx2.getField("id"), String.class)
+ .writeField("age", (Integer) pdx2.getField("age"), int.class).create();
+ employees.put(compositeKey1, pdx1);
+ employees.put(compositeKey2, pdx2);
- employees.destroy(compositeKey1.toString());
+ employees.destroy(compositeKey1);
ResultSet resultSet =
statement.executeQuery("select * from " + REGION_TABLE_NAME + " order by id asc");
@@ -342,15 +340,15 @@ public abstract class JdbcWriterIntegrationTest {
PdxInstance myPdx = cache.createPdxInstanceFactory(Employee.class.getName())
.writeString("id", "1").writeString("name", "Emp1")
.writeInt("age", 55).create();
- JSONObject compositeKey1 = new JSONObject();
- compositeKey1.put("id", myPdx.getField("id"));
- compositeKey1.put("age", myPdx.getField("age"));
- employees.put(compositeKey1.toString(), myPdx);
+ PdxInstance compositeKey1 = cache.createPdxInstanceFactory("IdAgeKeyType").neverDeserialize()
+ .writeField("id", (String) myPdx.getField("id"), String.class)
+ .writeField("age", (Integer) myPdx.getField("age"), int.class).create();
+ employees.put(compositeKey1, myPdx);
WritablePdxInstance updatedPdx = myPdx.createWriter();
updatedPdx.setField("name", "updated");
Employee updatedEmployee = (Employee) updatedPdx.getObject();
- employees.put(compositeKey1.toString(), updatedPdx);
+ employees.put(compositeKey1, updatedPdx);
ResultSet resultSet =
statement.executeQuery("select * from " + REGION_TABLE_NAME + " order by id asc");
@@ -390,12 +388,12 @@ public abstract class JdbcWriterIntegrationTest {
assertThat(resultSet.next()).isFalse();
}
- protected Region<String, PdxInstance> createRegionWithJDBCSynchronousWriter(String regionName,
+ protected Region<Object, PdxInstance> createRegionWithJDBCSynchronousWriter(String regionName,
String ids, String catalog, String schema)
throws RegionMappingExistsException {
jdbcWriter = new JdbcWriter(createSqlHandler(ids, catalog, schema), cache);
- RegionFactory<String, PdxInstance> regionFactory =
+ RegionFactory<Object, PdxInstance> regionFactory =
cache.createRegionFactory(RegionShortcut.REPLICATE);
regionFactory.setCacheWriter(jdbcWriter);
return regionFactory.create(regionName);
diff --git a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
index aaf641a..220ebfc 100644
--- a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
+++ b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/PostgresJdbcLoaderIntegrationTest.java
@@ -21,11 +21,11 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
-import org.json.JSONObject;
import org.junit.ClassRule;
import org.junit.Test;
import org.apache.geode.cache.Region;
+import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.test.junit.rules.DatabaseConnectionRule;
import org.apache.geode.test.junit.rules.PostgresConnectionRule;
@@ -80,10 +80,10 @@ public class PostgresJdbcLoaderIntegrationTest extends JdbcLoaderIntegrationTest
SCHEMA_NAME);
createPdxType();
- JSONObject key = new JSONObject();
- key.put("id", "1");
- key.put("name", "Emp1");
- Employee value = region.get(key.toString());
+ PdxInstance key =
+ cache.createPdxInstanceFactory("MyPdxKeyType").neverDeserialize().writeString("id", "1")
+ .writeString("name", "Emp1").create();
+ Employee value = region.get(key);
assertThat(value.getId()).isEqualTo("1");
assertThat(value.getName()).isEqualTo("Emp1");
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlHandler.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlHandler.java
index ec1fdbf..e0f81c5 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlHandler.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlHandler.java
@@ -22,13 +22,9 @@ import java.sql.Types;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import java.util.Set;
import javax.sql.DataSource;
-import org.json.JSONException;
-import org.json.JSONObject;
-
import org.apache.geode.InternalGemFireException;
import org.apache.geode.annotations.Experimental;
import org.apache.geode.cache.Operation;
@@ -261,20 +257,18 @@ public class SqlHandler {
new ColumnData(keyColumnName, key, tableMetaData.getColumnDataType(keyColumnName));
result.add(columnData);
} else {
- if (!(key instanceof String)) {
+ if (!(key instanceof PdxInstance)) {
throw new JdbcConnectorException(
"The key \"" + key + "\" of class \"" + key.getClass().getName()
- + "\" must be a java.lang.String because multiple columns are configured as ids.");
+ + "\" must be a PdxInstance because multiple columns are configured as ids.");
}
- JSONObject compositeKey = null;
- try {
- compositeKey = new JSONObject((String) key);
- } catch (JSONException ex) {
- throw new JdbcConnectorException("The key \"" + key
- + "\" must be a valid JSON string because multiple columns are configured as ids. Details: "
- + ex.getMessage());
+ PdxInstance compositeKey = (PdxInstance) key;
+ if (compositeKey.isDeserializable()) {
+ throw new JdbcConnectorException(
+ "The key \"" + key
+ + "\" must be a PdxInstance created with PdxInstanceFactory.neverDeserialize");
}
- Set<String> fieldNames = compositeKey.keySet();
+ List<String> fieldNames = compositeKey.getFieldNames();
if (fieldNames.size() != keyColumnNames.size()) {
throw new JdbcConnectorException("The key \"" + key + "\" should have "
+ keyColumnNames.size() + " fields but has " + fieldNames.size() + " fields.");
@@ -285,7 +279,7 @@ public class SqlHandler {
throw new JdbcConnectorException("The key \"" + key + "\" has the field \"" + fieldName
+ "\" which does not match any of the key columns: " + keyColumnNames);
}
- ColumnData columnData = new ColumnData(columnName, compositeKey.get(fieldName),
+ ColumnData columnData = new ColumnData(columnName, compositeKey.getField(fieldName),
tableMetaData.getColumnDataType(columnName));
result.add(columnData);
}
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlHandlerTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlHandlerTest.java
index a9303ad..b020e18 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlHandlerTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlHandlerTest.java
@@ -38,7 +38,6 @@ import java.util.Date;
import javax.sql.DataSource;
import junitparams.JUnitParamsRunner;
-import org.json.JSONObject;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -52,6 +51,7 @@ import org.apache.geode.connectors.jdbc.JdbcConnectorException;
import org.apache.geode.connectors.jdbc.internal.SqlHandler.DataSourceFactory;
import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.internal.PdxInstanceImpl;
import org.apache.geode.pdx.internal.PdxType;
@@ -374,14 +374,16 @@ public class SqlHandlerTest {
when(statement.executeUpdate()).thenReturn(1);
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwo", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwo")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
- handler.write(region, Operation.CREATE, compositeKey.toString(), value);
+ handler.write(region, Operation.CREATE, compositeKey, value);
verify(statement).setObject(1, compositeKeyFieldValueOne);
verify(statement).setObject(2, compositeKeyFieldValueTwo);
@@ -405,14 +407,16 @@ public class SqlHandlerTest {
when(statement.executeUpdate()).thenReturn(1);
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwo", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwo")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
- handler.write(region, Operation.UPDATE, compositeKey.toString(), value);
+ handler.write(region, Operation.UPDATE, compositeKey, value);
verify(statement).setObject(1, compositeKeyFieldValueOne);
verify(statement).setObject(2, compositeKeyFieldValueTwo);
@@ -436,14 +440,16 @@ public class SqlHandlerTest {
when(statement.executeUpdate()).thenReturn(1);
Object destroyKeyFieldValueOne = "fieldValueOne";
Object destroyKeyFieldValueTwo = "fieldValueTwo";
- JSONObject destroyKey = new JSONObject();
- destroyKey.put("fieldOne", destroyKeyFieldValueOne);
- destroyKey.put("fieldTwo", destroyKeyFieldValueTwo);
+ PdxInstance destroyKey = mock(PdxInstance.class);
+ when(destroyKey.isDeserializable()).thenReturn(false);
+ when(destroyKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(destroyKey.getField("fieldOne")).thenReturn(destroyKeyFieldValueOne);
+ when(destroyKey.getField("fieldTwo")).thenReturn(destroyKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
- handler.write(region, Operation.DESTROY, destroyKey.toString(), value);
+ handler.write(region, Operation.DESTROY, destroyKey, value);
verify(statement).setObject(1, destroyKeyFieldValueOne);
verify(statement).setObject(2, destroyKeyFieldValueTwo);
@@ -583,15 +589,17 @@ public class SqlHandlerTest {
public void returnsCorrectColumnForGetGivenCompositeKey() throws Exception {
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwo", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwo")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
EntryColumnData entryColumnData =
- handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey.toString(), value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey, value,
Operation.GET);
assertThat(entryColumnData.getEntryKeyColumnData()).isNotNull();
@@ -605,17 +613,17 @@ public class SqlHandlerTest {
@Test
public void getEntryColumnDataGivenWrongNumberOfCompositeKeyFieldsFails() throws Exception {
- Object compositeKeyFieldValueOne = "fieldValueOne";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne"));
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
thrown.expect(JdbcConnectorException.class);
thrown.expectMessage(
- "The key \"" + compositeKey.toString() + "\" should have 2 fields but has 1 fields.");
+ "The key \"" + compositeKey + "\" should have 2 fields but has 1 fields.");
- handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey.toString(), value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey, value,
Operation.GET);
}
@@ -623,17 +631,19 @@ public class SqlHandlerTest {
public void getEntryColumnDataGivenWrongFieldNameInCompositeKeyFails() throws Exception {
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwoWrong", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwoWrong"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwoWrong")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
thrown.expect(JdbcConnectorException.class);
- thrown.expectMessage("The key \"" + compositeKey.toString()
+ thrown.expectMessage("The key \"" + compositeKey
+ "\" has the field \"fieldTwoWrong\" which does not match any of the key columns: [fieldOne, fieldTwo]");
- handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey.toString(), value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey, value,
Operation.GET);
}
@@ -678,9 +688,11 @@ public class SqlHandlerTest {
private void testGetEntryColumnDataForCreateOrUpdateWithCompositeKey(Operation operation) {
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwo", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwo")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
@@ -689,7 +701,7 @@ public class SqlHandlerTest {
when(value.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo", nonKeyColumn));
EntryColumnData entryColumnData =
- handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey.toString(), value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey, value,
operation);
assertThat(entryColumnData.getEntryKeyColumnData()).isNotNull();
@@ -707,15 +719,17 @@ public class SqlHandlerTest {
public void returnsCorrectColumnForDestroyWithCompositeKey() throws Exception {
Object compositeKeyFieldValueOne = "fieldValueOne";
Object compositeKeyFieldValueTwo = "fieldValueTwo";
- JSONObject compositeKey = new JSONObject();
- compositeKey.put("fieldOne", compositeKeyFieldValueOne);
- compositeKey.put("fieldTwo", compositeKeyFieldValueTwo);
+ PdxInstance compositeKey = mock(PdxInstance.class);
+ when(compositeKey.isDeserializable()).thenReturn(false);
+ when(compositeKey.getFieldNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
+ when(compositeKey.getField("fieldOne")).thenReturn(compositeKeyFieldValueOne);
+ when(compositeKey.getField("fieldTwo")).thenReturn(compositeKeyFieldValueTwo);
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
when(regionMapping.getColumnNameForField("fieldOne", tableMetaDataView)).thenReturn("fieldOne");
when(regionMapping.getColumnNameForField("fieldTwo", tableMetaDataView)).thenReturn("fieldTwo");
EntryColumnData entryColumnData =
- handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey.toString(), value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, compositeKey, value,
Operation.DESTROY);
assertThat(entryColumnData.getEntryKeyColumnData()).isNotNull();
@@ -744,26 +758,29 @@ public class SqlHandlerTest {
}
@Test
- public void getEntryColumnDataWhenMultipleIdColumnsGivenNonStringFails() throws Exception {
+ public void getEntryColumnDataWhenMultipleIdColumnsGivenNonPdxInstanceFails() throws Exception {
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
Object nonCompositeKey = Integer.valueOf(123);
thrown.expect(JdbcConnectorException.class);
thrown.expectMessage(
- "The key \"123\" of class \"java.lang.Integer\" must be a java.lang.String because multiple columns are configured as ids.");
+ "The key \"123\" of class \"java.lang.Integer\" must be a PdxInstance because multiple columns are configured as ids.");
handler.getEntryColumnData(tableMetaDataView, regionMapping, nonCompositeKey, value,
Operation.DESTROY);
}
@Test
- public void getEntryColumnDataWhenMultipleIdColumnsGivenNonJsonStringFails() throws Exception {
+ public void getEntryColumnDataWhenMultipleIdColumnsGivenDeserializablePdxInstanceFails()
+ throws Exception {
when(tableMetaDataView.getKeyColumnNames()).thenReturn(Arrays.asList("fieldOne", "fieldTwo"));
- String nonJsonKey = "myKey";
+ PdxInstance nonCompositeKey = mock(PdxInstance.class);
+ when(nonCompositeKey.isDeserializable()).thenReturn(true);
thrown.expect(JdbcConnectorException.class);
thrown.expectMessage(
- "The key \"myKey\" must be a valid JSON string because multiple columns are configured as ids. Details: Value myKey of type java.lang.String cannot be converted to JSONObject");
+ "The key \"" + nonCompositeKey
+ + "\" must be a PdxInstance created with PdxInstanceFactory.neverDeserialize");
- handler.getEntryColumnData(tableMetaDataView, regionMapping, nonJsonKey, value,
+ handler.getEntryColumnData(tableMetaDataView, regionMapping, nonCompositeKey, value,
Operation.DESTROY);
}
diff --git a/geode-core/src/integrationTest/java/org/apache/geode/pdx/PdxInstanceFactoryJUnitTest.java b/geode-core/src/integrationTest/java/org/apache/geode/pdx/PdxInstanceFactoryJUnitTest.java
index acc7d00..3138854 100644
--- a/geode-core/src/integrationTest/java/org/apache/geode/pdx/PdxInstanceFactoryJUnitTest.java
+++ b/geode-core/src/integrationTest/java/org/apache/geode/pdx/PdxInstanceFactoryJUnitTest.java
@@ -15,6 +15,8 @@
package org.apache.geode.pdx;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -1231,4 +1233,126 @@ public class PdxInstanceFactoryJUnitTest {
return DataSerializer.readObject(new DataInputStream(new ByteArrayInputStream(bytes)));
}
+ @Test
+ public void undeserializablePdxInstanceGetObjectReturnsThePdxInstance() {
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ PdxInstance instance = factory.create();
+
+ Object object = instance.getObject();
+
+ assertThat(object).isSameAs(instance);
+ }
+
+ @Test
+ public void normalPdxInstanceGetObjectThrowsClassNotFoundGivenABadClass() {
+ PdxInstanceFactory factory = cache.createPdxInstanceFactory("badClass");
+ PdxInstance instance = factory.create();
+
+ assertThatThrownBy(() -> instance.getObject()).isInstanceOf(PdxSerializationException.class)
+ .hasCauseInstanceOf(ClassNotFoundException.class);
+ }
+
+ @Test
+ public void undeserializablePdxInstanceAddedToRegionWithPdxReadSerializedFalseReturnsEqualPdxInstanceWhenRegionGet() {
+ // make sure the cache has pdx-read-serialized set to false
+ this.cache.close();
+ this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, "0")
+ .setPdxReadSerialized(false).create();
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance instance = factory.create();
+ Region region = cache.createRegionFactory(RegionShortcut.PARTITION).create("myRegion");
+ region.put("key", instance);
+
+ Object getValue = region.get("key");
+
+ assertThat(getValue).isEqualTo(instance);
+ }
+
+ @Test
+ public void undeserializablePdxInstanceCanBeUsedAsRegionKey() {
+ // make sure the cache has pdx-read-serialized set to false
+ this.cache.close();
+ this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, "0")
+ .setPdxReadSerialized(false).create();
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance putKey = factory.create();
+ Region region = cache.createRegionFactory(RegionShortcut.PARTITION).create("myRegion");
+ region.put(putKey, "value");
+ factory = cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance getKey = factory.create();
+
+ Object getValue = region.get(getKey);
+
+ assertThat(getValue).isEqualTo("value");
+ }
+
+ @Test
+ public void undeserializablePdxInstanceWithDifferentTypeNameAreNotEqual() {
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance instance = factory.create();
+ factory = cache.createPdxInstanceFactory("myPdxInstanceType2").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance instance2 = factory.create();
+
+ assertThat(instance).isNotEqualTo(instance2);
+ }
+
+ @Test
+ public void undeserializablePdxInstanceWithMultipleEqualFieldsInDifferentOrderAreEqual() {
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ factory.writeString("fieldTwo", "valueTwo");
+ PdxInstance instance = factory.create();
+ factory = cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldTwo", "valueTwo");
+ factory.writeString("fieldOne", "valueOne");
+ PdxInstance instance2 = factory.create();
+
+ assertThat(instance).isEqualTo(instance2);
+ }
+
+ @Test
+ public void normalPdxInstanceAddedToRegionWithPdxReadSerializedFalseAndABadClassThrowsClassNotFoundWhenRegionGet() {
+ // make sure the cache has pdx-read-serialized set to false
+ this.cache.close();
+ this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, "0")
+ .setPdxReadSerialized(false).create();
+ PdxInstanceFactory factory = cache.createPdxInstanceFactory("badClass");
+ PdxInstance instance = factory.create();
+ Region region = cache.createRegionFactory(RegionShortcut.PARTITION).create("myRegion");
+ region.put("key", instance);
+
+ assertThatThrownBy(() -> region.get("key")).isInstanceOf(PdxSerializationException.class)
+ .hasCauseInstanceOf(ClassNotFoundException.class);
+ }
+
+ @Test
+ public void undeserializablePdxInstanceReturnsFalseFromIsDeserializable() {
+ PdxInstanceFactory factory =
+ cache.createPdxInstanceFactory("myPdxInstanceType").neverDeserialize();
+ factory.writeString("fieldOne", "valueOne");
+ factory.writeString("fieldTwo", "valueTwo");
+ PdxInstance instance = factory.create();
+
+ assertThat(instance.isDeserializable()).isFalse();
+ }
+
+ @Test
+ public void normalPdxInstanceReturnsTrueFromIsDeserializable() {
+ PdxInstanceFactory factory = cache.createPdxInstanceFactory("className");
+ factory.writeString("fieldOne", "valueOne");
+ factory.writeString("fieldTwo", "valueTwo");
+ PdxInstance instance = factory.create();
+
+ assertThat(instance.isDeserializable()).isTrue();
+ }
}
diff --git a/geode-core/src/main/java/org/apache/geode/cache/query/internal/AttributeDescriptor.java b/geode-core/src/main/java/org/apache/geode/cache/query/internal/AttributeDescriptor.java
index 1b5ae6c..274468a 100644
--- a/geode-core/src/main/java/org/apache/geode/cache/query/internal/AttributeDescriptor.java
+++ b/geode-core/src/main/java/org/apache/geode/cache/query/internal/AttributeDescriptor.java
@@ -34,10 +34,10 @@ import org.apache.geode.cache.query.QueryInvocationTargetException;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.pdx.JSONFormatter;
-import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializationException;
-import org.apache.geode.pdx.internal.FieldNotFoundInPdxVersion;
-import org.apache.geode.pdx.internal.PdxInstanceImpl;
+import org.apache.geode.pdx.internal.InternalPdxInstance;
+import org.apache.geode.pdx.internal.PdxType;
+import org.apache.geode.pdx.internal.TypeRegistry;
/**
* Utility for managing an attribute
@@ -49,12 +49,15 @@ import org.apache.geode.pdx.internal.PdxInstanceImpl;
public class AttributeDescriptor {
private final String _name;
private final MethodInvocationAuthorizer _methodInvocationAuthorizer;
+ private final TypeRegistry _pdxRegistry;
/** cache for remembering the correct Member for a class and attribute */
private static final ConcurrentMap<List, Member> _localCache = new ConcurrentHashMap();
- public AttributeDescriptor(MethodInvocationAuthorizer methodInvocationAuthorizer, String name) {
+ public AttributeDescriptor(TypeRegistry pdxRegistry,
+ MethodInvocationAuthorizer methodInvocationAuthorizer, String name) {
+ _pdxRegistry = pdxRegistry;
_methodInvocationAuthorizer = methodInvocationAuthorizer;
_name = name;
}
@@ -75,8 +78,8 @@ public class AttributeDescriptor {
if (target == null || target == QueryService.UNDEFINED) {
return QueryService.UNDEFINED;
}
- if (target instanceof PdxInstance) {
- return readPdx((PdxInstance) target);
+ if (target instanceof InternalPdxInstance) {
+ return readPdx((InternalPdxInstance) target);
}
// for non pdx objects
return readReflection(target);
@@ -202,70 +205,58 @@ public class AttributeDescriptor {
*
* @return the value of the field from PdxInstance
*/
- private Object readPdx(PdxInstance target)
+ private Object readPdx(InternalPdxInstance pdxInstance)
throws NameNotFoundException, QueryInvocationTargetException {
- if (target instanceof PdxInstanceImpl) {
- PdxInstanceImpl pdxInstance = (PdxInstanceImpl) target;
- // if the field is present in the pdxinstance
- if (pdxInstance.hasField(_name)) {
- // return PdxString if field is a String otherwise invoke readField
- return pdxInstance.getRawField(_name);
- } else {
- // field not found in the pdx instance, look for the field in any of the
- // PdxTypes (versions of the pdxinstance) in the type registry
- String className = pdxInstance.getClassName();
-
- // don't look further for field or method or reflect on GemFire JSON data
- if (className.equals(JSONFormatter.JSON_CLASSNAME)) {
- return QueryService.UNDEFINED;
- }
+ // if the field is present in the pdxinstance
+ if (pdxInstance.hasField(_name)) {
+ // return PdxString if field is a String otherwise invoke readField
+ return pdxInstance.getRawField(_name);
+ } else {
+ // field not found in the pdx instance, look for the field in any of the
+ // PdxTypes (versions of the pdxinstance) in the type registry
+ String className = pdxInstance.getClassName();
+ // don't look further for field or method or reflect on GemFire JSON data
+ if (className.equals(JSONFormatter.JSON_CLASSNAME)) {
+ return QueryService.UNDEFINED;
+ }
- // check if the field was not found previously
- if (!isFieldAlreadySearchedAndNotFound(className, _name)) {
- try {
- return pdxInstance.getDefaultValueIfFieldExistsInAnyPdxVersions(_name, className);
- } catch (FieldNotFoundInPdxVersion e1) {
- // remember the field that is not present in any version to avoid
- // trips to the registry next time
- updateClassToFieldsMap(className, _name);
- }
+ // check if the field was not found previously
+ if (!isFieldAlreadySearchedAndNotFound(className, _name)) {
+ PdxType pdxType = _pdxRegistry.getPdxTypeForField(_name, className);
+ if (pdxType == null) {
+ // remember the field that is not present in any version to avoid
+ // trips to the registry next time
+ updateClassToFieldsMap(className, _name);
+ } else {
+ return pdxType.getPdxField(_name).getFieldType().getDefaultValue();
}
- // if the field is not present in any of the versions try to
- // invoke implicit method call
- if (!this.isMethodAlreadySearchedAndNotFound(className, _name)) {
- try {
- return readFieldFromDeserializedObject(pdxInstance, target);
- } catch (NameNotFoundException ex) {
- updateClassToMethodsMap(pdxInstance.getClassName(), _name);
- throw ex;
- }
- } else
- return QueryService.UNDEFINED;
- }
- } else {
- // target could be another implementation of PdxInstance like
- // PdxInstanceEnum, in this case getRawField and getCachedOjects methods are
- // not available
- if (((PdxInstance) target).hasField(_name)) {
- return ((PdxInstance) target).getField(_name);
}
- throw new NameNotFoundException(
- String.format("Field ' %s ' in class ' %s ' is not accessible to the query processor",
- new Object[] {_name, target.getClass().getName()}));
+ // if the field is not present in any of the versions try to
+ // invoke implicit method call
+ if (!this.isMethodAlreadySearchedAndNotFound(className, _name)) {
+ try {
+ return readFieldFromDeserializedObject(pdxInstance);
+ } catch (NameNotFoundException ex) {
+ updateClassToMethodsMap(pdxInstance.getClassName(), _name);
+ throw ex;
+ }
+ } else
+ return QueryService.UNDEFINED;
}
}
- private Object readFieldFromDeserializedObject(PdxInstanceImpl pdxInstance, Object target)
+ private Object readFieldFromDeserializedObject(InternalPdxInstance pdxInstance)
throws NameNotFoundException, QueryInvocationTargetException {
+ Object obj = null;
try {
- Object obj = pdxInstance.getCachedObject();
- return readReflection(obj);
+ obj = pdxInstance.getCachedObject();
} catch (PdxSerializationException e) {
throw new NameNotFoundException( // the domain object is not available
- String.format("Field ' %s ' in class ' %s ' is not accessible to the query processor",
- new Object[] {_name, target.getClass().getName()}));
+ String.format("Field '%s' is not accessible to the query processor because: %s",
+ new Object[] {_name, e.getMessage()}));
}
+ return readReflection(obj);
}
private void updateClassToFieldsMap(String className, String field) {
diff --git a/geode-core/src/main/java/org/apache/geode/cache/query/internal/CompiledOperation.java b/geode-core/src/main/java/org/apache/geode/cache/query/internal/CompiledOperation.java
index 3ce809f..fd34189 100644
--- a/geode-core/src/main/java/org/apache/geode/cache/query/internal/CompiledOperation.java
+++ b/geode-core/src/main/java/org/apache/geode/cache/query/internal/CompiledOperation.java
@@ -35,7 +35,7 @@ import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializationException;
-import org.apache.geode.pdx.internal.PdxInstanceImpl;
+import org.apache.geode.pdx.internal.InternalPdxInstance;
import org.apache.geode.pdx.internal.PdxString;
/**
@@ -282,13 +282,9 @@ public class CompiledOperation extends AbstractCompiledValue {
// cache
CompiledOperation.cache.putIfAbsent(key, methodDispatch);
}
- if (receiver instanceof PdxInstance) {
+ if (receiver instanceof InternalPdxInstance) {
try {
- if (receiver instanceof PdxInstanceImpl) {
- receiver = ((PdxInstanceImpl) receiver).getCachedObject();
- } else {
- receiver = ((PdxInstance) receiver).getObject();
- }
+ receiver = ((InternalPdxInstance) receiver).getCachedObject();
} catch (PdxSerializationException ex) {
throw new QueryInvocationTargetException(ex);
}
diff --git a/geode-core/src/main/java/org/apache/geode/cache/query/internal/PathUtils.java b/geode-core/src/main/java/org/apache/geode/cache/query/internal/PathUtils.java
index 62236f4..0fb3fed 100644
--- a/geode-core/src/main/java/org/apache/geode/cache/query/internal/PathUtils.java
+++ b/geode-core/src/main/java/org/apache/geode/cache/query/internal/PathUtils.java
@@ -72,7 +72,7 @@ public class PathUtils {
}
}
try {
- return new AttributeDescriptor(
+ return new AttributeDescriptor(context.getCache().getPdxRegistry(),
context.getCache().getQueryService().getMethodInvocationAuthorizer(), attribute)
.read(target);
} catch (NameNotFoundException nfe) {
@@ -112,7 +112,7 @@ public class PathUtils {
for (int i = 1; i < types.length; i++) {
ObjectType currentType = types[i - 1];
- Member member = new AttributeDescriptor(
+ Member member = new AttributeDescriptor(context.getCache().getPdxRegistry(),
context.getCache().getQueryService().getMethodInvocationAuthorizer(), pathArray[i - 1])
.getReadMember(currentType.resolveClass());
@@ -170,7 +170,7 @@ public class PathUtils {
stepStr = stepStr.substring(0, stepStr.length() - 2);
member = clazz.getMethod(stepStr, (Class[]) null);
} else {
- member = new AttributeDescriptor(
+ member = new AttributeDescriptor(context.getCache().getPdxRegistry(),
context.getCache().getQueryService().getMethodInvocationAuthorizer(), stepStr)
.getReadMember(clazz);
}
diff --git a/geode-core/src/main/java/org/apache/geode/cache/query/internal/RuntimeIterator.java b/geode-core/src/main/java/org/apache/geode/cache/query/internal/RuntimeIterator.java
index 73480f8..e395999 100644
--- a/geode-core/src/main/java/org/apache/geode/cache/query/internal/RuntimeIterator.java
+++ b/geode-core/src/main/java/org/apache/geode/cache/query/internal/RuntimeIterator.java
@@ -198,7 +198,7 @@ public class RuntimeIterator extends AbstractCompiledValue {
// if there are zero arguments and it's an attribute, then defer to
// AttributeDescriptor
// to see if there's a match
- return new AttributeDescriptor(
+ return new AttributeDescriptor(context.getCache().getPdxRegistry(),
context.getCache().getQueryService().getMethodInvocationAuthorizer(), name)
.validateReadType(clazz);
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstance.java b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstance.java
index f381ba2..abd4942 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstance.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstance.java
@@ -64,8 +64,9 @@ public interface PdxInstance extends java.io.Serializable {
/**
* Deserializes and returns the domain object that this instance represents.
+ * If this instance is one that never deserializes then getObject returns "this".
*
- * @return the deserialized domain object.
+ * @return the deserialized domain object or "this" if this instance never deserializes
* @throws PdxSerializationException if the instance could not be deserialized
*/
Object getObject();
@@ -211,4 +212,18 @@ public interface PdxInstance extends java.io.Serializable {
*/
WritablePdxInstance createWriter();
+
+ /**
+ * Returns whether this instance can be deserialized to a domain class.
+ * Instances that never deserialize can be created using
+ * {@link PdxInstanceFactory#neverDeserialize}.
+ *
+ * @return true if this instance can be deserialized to a domain class;
+ * false if it will never be deserialized.
+ *
+ * @since Geode 1.9
+ */
+ default boolean isDeserializable() {
+ return true;
+ }
+
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
index 5a17968..b902bbf 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
@@ -552,4 +552,13 @@ public interface PdxInstanceFactory {
* @throws PdxFieldDoesNotExistException if the named field has not already been written.
*/
PdxInstanceFactory markIdentityField(String fieldName);
+
+ /**
+ * The instance created by this factory will be one that is never
+ * deserialized to a domain class. It will always be a PdxInstance.
+ *
+ * @return this PdxInstanceFactory
+ * @since Geode 1.9
+ */
+ PdxInstanceFactory neverDeserialize();
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/EnumInfo.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/EnumInfo.java
index 998d65b..cd15db1 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/EnumInfo.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/EnumInfo.java
@@ -27,7 +27,6 @@ import org.apache.geode.DataSerializer;
import org.apache.geode.internal.DataSerializableFixedID;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.InternalDataSerializer;
-import org.apache.geode.internal.Sendable;
import org.apache.geode.internal.Version;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializationException;
@@ -194,7 +193,7 @@ public class EnumInfo implements DataSerializableFixedID {
}
public static class PdxInstanceEnumInfo
- implements PdxInstance, Sendable, ConvertableToBytes, ComparableEnum {
+ implements InternalPdxInstance, ComparableEnum {
private static final long serialVersionUID = 7907582104525106416L;
private final int enumId;
private final EnumInfo ei;
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/FieldNotFoundInPdxVersion.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/FieldNotFoundInPdxVersion.java
deleted file mode 100644
index 8b32295..0000000
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/FieldNotFoundInPdxVersion.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.pdx.internal;
-
-public class FieldNotFoundInPdxVersion extends Exception {
-
- private static final long serialVersionUID = 1292033563588485577L;
-
- public FieldNotFoundInPdxVersion(String message) {
- super(message);
- }
-
-}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/InternalPdxInstance.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/InternalPdxInstance.java
index f2d895b..8c88dea 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/InternalPdxInstance.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/InternalPdxInstance.java
@@ -14,7 +14,32 @@
*/
package org.apache.geode.pdx.internal;
+import org.apache.geode.internal.Sendable;
import org.apache.geode.pdx.PdxInstance;
+import org.apache.geode.pdx.PdxSerializationException;
-public interface InternalPdxInstance extends PdxInstance, ConvertableToBytes {
+public interface InternalPdxInstance extends PdxInstance, ConvertableToBytes, Sendable {
+ /**
+ * The same as calling getObject() but may also cache the result and future calls
+ * of this method will return the cached object instead of recomputing it.
+ * Implementors that do not want to support a cache can just use the default implementation
+ * which simply calls getObject().
+ *
+ * @throws PdxSerializationException if the instance could not be deserialized
+ */
+ default Object getCachedObject() {
+ return getObject();
+ }
+
+ /**
+ * The same as calling getField(fieldName) except that some implementations may support
+ * returning a PdxString instead of String.
+ * Implementors that do not support PdxString can use the default implementation
+ * which simply calls getField(fieldName).
+ *
+ * @throws PdxSerializationException if the field could not be deserialized
+ */
+ default Object getRawField(String fieldName) {
+ return getField(fieldName);
+ }
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceEnum.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceEnum.java
index b57f112..8d68b9b 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceEnum.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceEnum.java
@@ -24,9 +24,7 @@ import org.apache.geode.DataSerializer;
import org.apache.geode.internal.DSCODE;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.InternalDataSerializer;
-import org.apache.geode.internal.Sendable;
import org.apache.geode.internal.Version;
-import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializationException;
import org.apache.geode.pdx.WritablePdxInstance;
@@ -35,7 +33,7 @@ import org.apache.geode.pdx.WritablePdxInstance;
*
* @since GemFire 6.6.2
*/
-public class PdxInstanceEnum implements PdxInstance, Sendable, ConvertableToBytes, ComparableEnum {
+public class PdxInstanceEnum implements InternalPdxInstance, ComparableEnum {
private static final long serialVersionUID = -7417287878052772302L;
private final String className;
private final String enumName;
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
index a353761..8ccc330 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
@@ -33,12 +33,13 @@ import org.apache.geode.pdx.PdxInstanceFactory;
public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
private final PdxWriterImpl writer;
+ private final PdxType pdxType;
private boolean created = false;
private PdxInstanceFactoryImpl(String name, boolean expectDomainClass, TypeRegistry pdxRegistry) {
PdxOutputStream pdxOutputStream = new PdxOutputStream();
- PdxType pdxType = new PdxType(name, expectDomainClass);
+ this.pdxType = new PdxType(name, expectDomainClass);
this.writer = new PdxWriterImpl(pdxType, pdxRegistry, pdxOutputStream);
}
@@ -275,4 +276,10 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
return ei.getPdxInstance(tr.defineEnum(ei));
}
+ @Override
+ public PdxInstanceFactory neverDeserialize() {
+ this.pdxType.setNoDomainClass(true);
+ return this;
+ }
+
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
index 8557594..dfb9b6b 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
@@ -35,7 +35,6 @@ import org.apache.geode.distributed.internal.DMStats;
import org.apache.geode.internal.ClassPathLoader;
import org.apache.geode.internal.DSCODE;
import org.apache.geode.internal.InternalDataSerializer;
-import org.apache.geode.internal.Sendable;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.tcp.ByteBufferInputStream;
@@ -55,7 +54,7 @@ import org.apache.geode.pdx.WritablePdxInstance;
* We do not use this normal java io serialization when serializing this class in GemFire because
* Sendable takes precedence over Serializable.
*/
-public class PdxInstanceImpl extends PdxReaderImpl implements InternalPdxInstance, Sendable {
+public class PdxInstanceImpl extends PdxReaderImpl implements InternalPdxInstance {
private static final long serialVersionUID = -1669268527103938431L;
@@ -186,7 +185,7 @@ public class PdxInstanceImpl extends PdxReaderImpl implements InternalPdxInstanc
}
}
- // this is for internal use of the query engine.
+ @Override
public Object getCachedObject() {
Object result = this.cachedObjectForm;
if (result == null) {
@@ -652,20 +651,13 @@ public class PdxInstanceImpl extends PdxReaderImpl implements InternalPdxInstanc
return false;
}
+ @Override
public Object getRawField(String fieldName) {
return getUnmodifiableReader(fieldName).readRawField(fieldName);
}
- public Object getDefaultValueIfFieldExistsInAnyPdxVersions(String fieldName, String className)
- throws FieldNotFoundInPdxVersion {
- PdxType pdxType =
- GemFireCacheImpl.getForPdx("PDX registry is unavailable because the Cache has been closed.")
- .getPdxRegistry().getPdxTypeForField(fieldName, className);
- if (pdxType == null) {
- throw new FieldNotFoundInPdxVersion(
- "PdxType with field " + fieldName + " is not found for class " + className);
- }
- return pdxType.getPdxField(fieldName).getFieldType().getDefaultValue();
+ @Override
+ public boolean isDeserializable() {
+ return !getPdxType().getNoDomainClass();
}
-
}
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxType.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxType.java
index bde8c07..31f3833 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxType.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxType.java
@@ -231,6 +231,10 @@ public class PdxType implements DataSerializable {
return this.noDomainClass;
}
+ public void setNoDomainClass(boolean noDomainClass) {
+ this.noDomainClass = noDomainClass;
+ }
+
public int getTypeId() {
return this.typeId;
}
diff --git a/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt b/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
index 427736e..0bc47a9 100644
--- a/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
+++ b/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
@@ -640,7 +640,6 @@ org/apache/geode/pdx/PdxInitializationException,true,5098737377658808834
org/apache/geode/pdx/PdxRegistryMismatchException,true,-2329989020829052537
org/apache/geode/pdx/PdxSerializationException,true,-3843814927034345635
org/apache/geode/pdx/internal/EnumInfo$PdxInstanceEnumInfo,true,7907582104525106416,ei:org/apache/geode/pdx/internal/EnumInfo,enumId:int
-org/apache/geode/pdx/internal/FieldNotFoundInPdxVersion,true,1292033563588485577
org/apache/geode/pdx/internal/PdxInputStream,false
org/apache/geode/pdx/internal/PdxReaderImpl,true,-6094553093860427759,blobType:org/apache/geode/pdx/internal/PdxType,dis:org/apache/geode/pdx/internal/PdxInputStream
org/apache/geode/redis/internal/CoderException,true,4707944288714910949