Posted to commits@geode.apache.org by ds...@apache.org on 2019/01/25 20:37:34 UTC

[geode] branch feature/GEODE-6291 updated: create mapping will now use existing pdx types when creating the field mappings

This is an automated email from the ASF dual-hosted git repository.

dschneider pushed a commit to branch feature/GEODE-6291
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/feature/GEODE-6291 by this push:
     new ceb148d  create mapping will now use existing pdx types when creating the field mappings
ceb148d is described below

commit ceb148d252b43e581f4508073be6f5b529a63fcd
Author: Darrel Schneider <ds...@pivotal.io>
AuthorDate: Fri Jan 25 12:36:35 2019 -0800

    create mapping will now use existing pdx types when creating the field mappings
---
 .../jdbc/internal/SqlToPdxInstanceCreator.java     |  70 ++++++++
 .../CreateMappingPreconditionCheckFunction.java    | 196 ++++++---------------
 .../jdbc/internal/configuration/FieldMapping.java  |   4 +
 .../jdbc/internal/SqlToPdxInstanceCreatorTest.java |  91 ++++++++++
 ...CreateMappingPreconditionCheckFunctionTest.java | 188 +++++++++-----------
 5 files changed, 306 insertions(+), 243 deletions(-)
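In short, the 'create mapping' precondition check no longer defines a brand new pdx type whose field names simply copy the column names (the old PdxInstanceFactory path removed below); instead it asks the cache's pdx type registry for types already registered for the mapped class and, when one of them has a matching field, copies that field's name and type into the FieldMapping. A rough sketch of the new flow (not the committed code; tableMetaData, typeRegistry, pdxClassName and fieldMappings stand in for the locals used in CreateMappingPreconditionCheckFunction, and only the exact-name lookup is shown):

    for (String jdbcName : tableMetaData.getColumnNames()) {
      boolean isNullable = tableMetaData.isColumnNullable(jdbcName);
      JDBCType jdbcType = tableMetaData.getColumnDataType(jdbcName);
      // Only the JDBC side is known up front; the pdx name and type start out empty.
      FieldMapping mapping = new FieldMapping("", "", jdbcName, jdbcType.getName(), isNullable);
      // If the class has already been pdx serialized, reuse the registered field name and type.
      for (PdxType pdxType : typeRegistry.getPdxTypesForClassName(pdxClassName)) {
        PdxField field = pdxType.getPdxField(jdbcName);
        if (field != null) {
          mapping.setPdxName(field.getFieldName());
          mapping.setPdxType(field.getFieldType().name());
          break;
        }
      }
      fieldMappings.add(mapping);
    }

The committed version additionally falls back to a case-insensitive match and reports an error when that match is ambiguous; see the CreateMappingPreconditionCheckFunction.java diff below.
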

diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
index 335dd85..f8c2056 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
@@ -258,4 +258,74 @@ class SqlToPdxInstanceCreator {
     throw new JdbcConnectorException("Could not find PdxType for field " + fieldName
         + ". Add class " + pdxClassName + " with " + fieldName + " to pdx registry.");
   }
+
+  static FieldType computeFieldType(boolean isNullable, JDBCType jdbcType) {
+    switch (jdbcType) {
+      case BIT: // 1 bit
+        return computeType(isNullable, FieldType.BOOLEAN);
+      case TINYINT: // unsigned 8 bits
+        return computeType(isNullable, FieldType.SHORT);
+      case SMALLINT: // signed 16 bits
+        return computeType(isNullable, FieldType.SHORT);
+      case INTEGER: // signed 32 bits
+        return computeType(isNullable, FieldType.INT);
+      case BIGINT: // signed 64 bits
+        return computeType(isNullable, FieldType.LONG);
+      case FLOAT:
+        return computeType(isNullable, FieldType.DOUBLE);
+      case REAL:
+        return computeType(isNullable, FieldType.FLOAT);
+      case DOUBLE:
+        return computeType(isNullable, FieldType.DOUBLE);
+      case CHAR:
+        return FieldType.STRING;
+      case VARCHAR:
+        return FieldType.STRING;
+      case LONGVARCHAR:
+        return FieldType.STRING;
+      case DATE:
+        return computeDate(isNullable);
+      case TIME:
+        return computeDate(isNullable);
+      case TIMESTAMP:
+        return computeDate(isNullable);
+      case BINARY:
+        return FieldType.BYTE_ARRAY;
+      case VARBINARY:
+        return FieldType.BYTE_ARRAY;
+      case LONGVARBINARY:
+        return FieldType.BYTE_ARRAY;
+      case NULL:
+        throw new IllegalStateException("unexpected NULL jdbc column type");
+      case BLOB:
+        return FieldType.BYTE_ARRAY;
+      case BOOLEAN:
+        return computeType(isNullable, FieldType.BOOLEAN);
+      case NCHAR:
+        return FieldType.STRING;
+      case NVARCHAR:
+        return FieldType.STRING;
+      case LONGNVARCHAR:
+        return FieldType.STRING;
+      case TIME_WITH_TIMEZONE:
+        return computeDate(isNullable);
+      case TIMESTAMP_WITH_TIMEZONE:
+        return computeDate(isNullable);
+      default:
+        return FieldType.OBJECT;
+    }
+  }
+
+  private static FieldType computeType(boolean isNullable, FieldType nonNullType) {
+    if (isNullable) {
+      return FieldType.OBJECT;
+    }
+    return nonNullType;
+
+  }
+
+  private static FieldType computeDate(boolean isNullable) {
+    return computeType(isNullable, FieldType.DATE);
+  }
+
 }
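computeFieldType itself is unchanged by this commit; it only moves from CreateMappingPreconditionCheckFunction into SqlToPdxInstanceCreator. Nullable numeric, boolean and date columns map to FieldType.OBJECT so a SQL NULL can be represented, while character and binary columns keep their naturally nullable pdx types. A few illustrative calls (as written from test code in the same package, since the method is package-private; expected values follow the switch above):

    SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.INTEGER);   // FieldType.INT
    SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.INTEGER);    // FieldType.OBJECT (nullable column)
    SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIMESTAMP); // FieldType.DATE
    SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.VARCHAR);    // FieldType.STRING (already nullable)
    SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NULL);      // throws IllegalStateException
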
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
index dfb1992..e2c52e7 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
@@ -19,7 +19,9 @@ import java.sql.Connection;
 import java.sql.JDBCType;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import javax.sql.DataSource;
 
@@ -31,12 +33,13 @@ import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.jndi.JNDIInvoker;
 import org.apache.geode.management.cli.CliFunction;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
-import org.apache.geode.pdx.FieldType;
-import org.apache.geode.pdx.PdxInstance;
-import org.apache.geode.pdx.PdxInstanceFactory;
+import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
+import org.apache.geode.pdx.internal.TypeRegistry;
 
 @Experimental
 public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMapping> {
@@ -70,34 +73,23 @@ public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMa
           + "\" not found. Create it with gfsh 'create data-source --pooled --name="
           + dataSourceName + "'.");
     }
+    InternalCache cache = (InternalCache) context.getCache();
+    TypeRegistry typeRegistry = cache.getPdxRegistry();
     try (Connection connection = dataSource.getConnection()) {
       TableMetaDataView tableMetaData =
           tableMetaDataManager.getTableMetaDataView(connection, regionMapping);
-      PdxInstanceFactory pdxInstanceFactory =
-          context.getCache().createPdxInstanceFactory(regionMapping.getPdxName());
       Object[] output = new Object[2];
       ArrayList<FieldMapping> fieldMappings = new ArrayList<>();
       output[1] = fieldMappings;
       for (String jdbcName : tableMetaData.getColumnNames()) {
         boolean isNullable = tableMetaData.isColumnNullable(jdbcName);
         JDBCType jdbcType = tableMetaData.getColumnDataType(jdbcName);
-        String pdxName = jdbcName;
-        // TODO: look for existing pdx types to picked pdxName
-        // It seems very unlikely that when a mapping is being
-        // created that a pdx type will already exist. So I'm not
-        // sure trying to look for an existing type is worth the effort.
-        // But that does mean that the pdx field name will always be the
-        // same as the column name. If we added a gfsh command that allowed
-        // you to create a pdx type (pretty easy to implement using PdxInstanceFactory)
-        // then it would be much more likely that a pdx type could exist
-        // when the mapping is created. In that case we should then do
-        // some extra work here to look at existing types.
-        FieldType pdxType = computeFieldType(isNullable, jdbcType);
-        pdxInstanceFactory.writeField(pdxName, null, pdxType.getFieldClass());
-        fieldMappings.add(
-            new FieldMapping(pdxName, pdxType.name(), jdbcName, jdbcType.getName(), isNullable));
+        FieldMapping fieldMapping =
+            new FieldMapping("", "", jdbcName, jdbcType.getName(), isNullable);
+        updateFieldMappingFromExistingPdxType(fieldMapping, typeRegistry,
+            regionMapping.getPdxName());
+        fieldMappings.add(fieldMapping);
       }
-      PdxInstance pdxInstance = pdxInstanceFactory.create();
       if (regionMapping.getIds() == null || regionMapping.getIds().isEmpty()) {
         List<String> keyColummnNames = tableMetaData.getKeyColumnNames();
         output[0] = String.join(",", keyColummnNames);
@@ -110,134 +102,52 @@ public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMa
     }
   }
 
-  static FieldType computeFieldType(boolean isNullable, JDBCType jdbcType) {
-    switch (jdbcType) {
-      case BIT: // 1 bit
-        return computeType(isNullable, FieldType.BOOLEAN);
-      case TINYINT: // unsigned 8 bits
-        return computeType(isNullable, FieldType.SHORT);
-      case SMALLINT: // signed 16 bits
-        return computeType(isNullable, FieldType.SHORT);
-      case INTEGER: // signed 32 bits
-        return computeType(isNullable, FieldType.INT);
-      case BIGINT: // signed 64 bits
-        return computeType(isNullable, FieldType.LONG);
-      case FLOAT:
-        return computeType(isNullable, FieldType.DOUBLE);
-      case REAL:
-        return computeType(isNullable, FieldType.FLOAT);
-      case DOUBLE:
-        return computeType(isNullable, FieldType.DOUBLE);
-      case CHAR:
-        return FieldType.STRING;
-      case VARCHAR:
-        return FieldType.STRING;
-      case LONGVARCHAR:
-        return FieldType.STRING;
-      case DATE:
-        return computeDate(isNullable);
-      case TIME:
-        return computeDate(isNullable);
-      case TIMESTAMP:
-        return computeDate(isNullable);
-      case BINARY:
-        return FieldType.BYTE_ARRAY;
-      case VARBINARY:
-        return FieldType.BYTE_ARRAY;
-      case LONGVARBINARY:
-        return FieldType.BYTE_ARRAY;
-      case NULL:
-        throw new IllegalStateException("unexpected NULL jdbc column type");
-      case BLOB:
-        return FieldType.BYTE_ARRAY;
-      case BOOLEAN:
-        return computeType(isNullable, FieldType.BOOLEAN);
-      case NCHAR:
-        return FieldType.STRING;
-      case NVARCHAR:
-        return FieldType.STRING;
-      case LONGNVARCHAR:
-        return FieldType.STRING;
-      case TIME_WITH_TIMEZONE:
-        return computeDate(isNullable);
-      case TIMESTAMP_WITH_TIMEZONE:
-        return computeDate(isNullable);
-      default:
-        return FieldType.OBJECT;
+  private void updateFieldMappingFromExistingPdxType(FieldMapping fieldMapping,
+      TypeRegistry typeRegistry, String pdxClassName) {
+    String columnName = fieldMapping.getJdbcName();
+    Set<PdxType> pdxTypes = typeRegistry.getPdxTypesForClassName(pdxClassName);
+    if (pdxTypes.isEmpty()) {
+      return;
+    }
+    PdxField foundField = findExactMatch(columnName, pdxTypes);
+    if (foundField == null) {
+      foundField = findCaseInsensitiveMatch(columnName, pdxTypes);
+    }
+    if (foundField != null) {
+      fieldMapping.setPdxName(foundField.getFieldName());
+      fieldMapping.setPdxType(foundField.getFieldType().name());
     }
   }
 
-  private static FieldType computeType(boolean isNullable, FieldType nonNullType) {
-    if (isNullable) {
-      return FieldType.OBJECT;
+  private PdxField findCaseInsensitiveMatch(String columnName, Set<PdxType> pdxTypes) {
+    HashSet<String> matchingFieldNames = new HashSet<>();
+    for (PdxType pdxType : pdxTypes) {
+      for (String existingFieldName : pdxType.getFieldNames()) {
+        if (existingFieldName.equalsIgnoreCase(columnName)) {
+          matchingFieldNames.add(existingFieldName);
+        }
+      }
     }
-    return nonNullType;
-
+    if (matchingFieldNames.isEmpty()) {
+      return null;
+    } else if (matchingFieldNames.size() > 1) {
+      throw new JdbcConnectorException(
+          "Could not determine what pdx field to use for the column name " + columnName
+              + " because the pdx fields " + String.join(", ", matchingFieldNames)
+              + " all match it.");
+    }
+    String matchingFieldName = matchingFieldNames.iterator().next();
+    return findExactMatch(matchingFieldName, pdxTypes);
   }
 
-  private static FieldType computeDate(boolean isNullable) {
-    return computeType(isNullable, FieldType.DATE);
+  private PdxField findExactMatch(String columnName, Set<PdxType> pdxTypes) {
+    for (PdxType pdxType : pdxTypes) {
+      PdxField foundField = pdxType.getPdxField(columnName);
+      if (foundField != null) {
+        return foundField;
+      }
+    }
+    return null;
   }
 
-  // Set<PdxType> pdxTypes = getPdxTypesForClassName(typeRegistry);
-  // String fieldName = findExactMatch(columnName, pdxTypes);
-  // if (fieldName == null) {
-  // fieldName = findCaseInsensitiveMatch(columnName, pdxTypes);
-  // }
-  // return fieldName;
-
-  // private Set<PdxType> getPdxTypesForClassName(TypeRegistry typeRegistry) {
-  // Set<PdxType> pdxTypes = typeRegistry.getPdxTypesForClassName(getPdxName());
-  // if (pdxTypes.isEmpty()) {
-  // throw new JdbcConnectorException(
-  // "The class " + getPdxName() + " has not been pdx serialized.");
-  // }
-  // return pdxTypes;
-  // }
-
-  // /**
-  // * Given a column name and a set of pdx types, find the field name in those types that
-  // match,
-  // * ignoring case, the column name.
-  // *
-  // * @return the matching field name or null if no match
-  // * @throws JdbcConnectorException if no fields match
-  // * @throws JdbcConnectorException if more than one field matches
-  // */
-  // private String findCaseInsensitiveMatch(String columnName, Set<PdxType> pdxTypes) {
-  // HashSet<String> matchingFieldNames = new HashSet<>();
-  // for (PdxType pdxType : pdxTypes) {
-  // for (String existingFieldName : pdxType.getFieldNames()) {
-  // if (existingFieldName.equalsIgnoreCase(columnName)) {
-  // matchingFieldNames.add(existingFieldName);
-  // }
-  // }
-  // }
-  // if (matchingFieldNames.isEmpty()) {
-  // throw new JdbcConnectorException("The class " + getPdxName()
-  // + " does not have a field that matches the column " + columnName);
-  // } else if (matchingFieldNames.size() > 1) {
-  // throw new JdbcConnectorException(
-  // "Could not determine what pdx field to use for the column name " + columnName
-  // + " because the pdx fields " + matchingFieldNames + " all match it.");
-  // }
-  // return matchingFieldNames.iterator().next();
-  // }
-  //
-  // /**
-  // * Given a column name, search the given pdxTypes for a field whose name exactly matches the
-  // * column name.
-  // *
-  // * @return the matching field name or null if no match
-  // */
-  // private String findExactMatch(String columnName, Set<PdxType> pdxTypes) {
-  // for (PdxType pdxType : pdxTypes) {
-  // if (pdxType.getPdxField(columnName) != null) {
-  // return columnName;
-  // }
-  // }
-  // return null;
-  // }
-
-
 }
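The lookup in updateFieldMappingFromExistingPdxType is two-pass: an exact field-name match first, then a case-insensitive match, which fails only when several pdx fields differ from the column name by case alone. A condensed paraphrase of that order (a sketch, not the committed code; matchColumn is a hypothetical helper that folds findExactMatch and findCaseInsensitiveMatch together):

    private static PdxField matchColumn(String columnName, Set<PdxType> pdxTypes) {
      // Pass 1: a field whose name equals the column name exactly.
      for (PdxType type : pdxTypes) {
        PdxField field = type.getPdxField(columnName);
        if (field != null) {
          return field;
        }
      }
      // Pass 2: fields that match ignoring case.
      Set<String> candidates = new HashSet<>();
      for (PdxType type : pdxTypes) {
        for (String name : type.getFieldNames()) {
          if (name.equalsIgnoreCase(columnName)) {
            candidates.add(name);
          }
        }
      }
      if (candidates.isEmpty()) {
        return null; // leave the FieldMapping's pdx name/type empty
      }
      if (candidates.size() > 1) {
        throw new JdbcConnectorException("Could not determine what pdx field to use for the column name "
            + columnName + " because the pdx fields " + String.join(", ", candidates) + " all match it.");
      }
      // Exactly one candidate: resolve it by its exact registered name.
      return matchColumn(candidates.iterator().next(), pdxTypes);
    }

If neither pass matches, the mapping keeps its empty pdx name and type, which is what the updated CreateMappingPreconditionCheckFunctionTest below expects.
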
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/FieldMapping.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/FieldMapping.java
index 1b87d4b..26680e7 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/FieldMapping.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/FieldMapping.java
@@ -61,6 +61,10 @@ public class FieldMapping implements Serializable {
     this.pdxName = value;
   }
 
+  public void setPdxType(String value) {
+    this.pdxType = value;
+  }
+
   public String getJdbcName() {
     return jdbcName;
   }
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
index 97fc4d0..a97ccf5 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
@@ -15,6 +15,7 @@
 package org.apache.geode.connectors.jdbc.internal;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.catchThrowable;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.anyString;
@@ -828,4 +829,94 @@ public class SqlToPdxInstanceCreatorTest {
     when(metaData.getColumnName(2)).thenReturn(COLUMN_NAME_2);
   }
 
+  @Test
+  public void computeFieldTypeTest() {
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BOOLEAN))
+        .isEqualTo(FieldType.BOOLEAN);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BOOLEAN))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BIT))
+        .isEqualTo(FieldType.BOOLEAN);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BIT))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TINYINT))
+        .isEqualTo(FieldType.SHORT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TINYINT))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.SMALLINT))
+        .isEqualTo(FieldType.SHORT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.SMALLINT))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.INTEGER))
+        .isEqualTo(FieldType.INT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.INTEGER))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BIGINT))
+        .isEqualTo(FieldType.LONG);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BIGINT))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.REAL))
+        .isEqualTo(FieldType.FLOAT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.REAL))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.FLOAT))
+        .isEqualTo(FieldType.DOUBLE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.FLOAT))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.DOUBLE))
+        .isEqualTo(FieldType.DOUBLE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.DOUBLE))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.DATE))
+        .isEqualTo(FieldType.DATE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.DATE))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIME))
+        .isEqualTo(FieldType.DATE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIME))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIMESTAMP))
+        .isEqualTo(FieldType.DATE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIMESTAMP))
+        .isEqualTo(FieldType.OBJECT);
+    assertThat(
+        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIME_WITH_TIMEZONE))
+            .isEqualTo(FieldType.DATE);
+    assertThat(
+        SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIME_WITH_TIMEZONE))
+            .isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false,
+        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.DATE);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(true,
+        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.OBJECT);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.CHAR))
+        .isEqualTo(FieldType.STRING);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.VARCHAR))
+        .isEqualTo(FieldType.STRING);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGVARCHAR))
+        .isEqualTo(FieldType.STRING);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NCHAR))
+        .isEqualTo(FieldType.STRING);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NVARCHAR))
+        .isEqualTo(FieldType.STRING);
+    assertThat(
+        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGNVARCHAR))
+            .isEqualTo(FieldType.STRING);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BLOB))
+        .isEqualTo(FieldType.BYTE_ARRAY);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BINARY))
+        .isEqualTo(FieldType.BYTE_ARRAY);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.VARBINARY))
+        .isEqualTo(FieldType.BYTE_ARRAY);
+    assertThat(
+        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGVARBINARY))
+            .isEqualTo(FieldType.BYTE_ARRAY);
+    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.ROWID))
+        .isEqualTo(FieldType.OBJECT);
+    Throwable throwable = catchThrowable(
+        () -> SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NULL));
+    assertThat(throwable).isInstanceOf(IllegalStateException.class);
+  }
+
+
 }
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
index fb4ad26..01e87e9 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
@@ -17,7 +17,6 @@ package org.apache.geode.connectors.jdbc.internal.cli;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.catchThrowable;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 import java.io.Serializable;
@@ -27,6 +26,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
@@ -47,7 +47,9 @@ import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
 import org.apache.geode.pdx.FieldType;
-import org.apache.geode.pdx.PdxInstanceFactory;
+import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
+import org.apache.geode.pdx.internal.TypeRegistry;
 
 public class CreateMappingPreconditionCheckFunctionTest {
 
@@ -60,7 +62,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
   private FunctionContext<RegionMapping> context;
   private ResultSender<Object> resultSender;
   private InternalCache cache;
-  private PdxInstanceFactory pdxInstanceFactory;
+  private TypeRegistry typeRegistry;
   private DataSourceFactory dataSourceFactory;
   private TableMetaDataManager tableMetaDataManager;
   private TableMetaDataView tableMetaDataView;
@@ -73,8 +75,8 @@ public class CreateMappingPreconditionCheckFunctionTest {
     context = mock(FunctionContext.class);
     resultSender = mock(ResultSender.class);
     cache = mock(InternalCache.class);
-    pdxInstanceFactory = mock(PdxInstanceFactory.class);
-    when(cache.createPdxInstanceFactory(PDX_CLASS_NAME)).thenReturn(pdxInstanceFactory);
+    typeRegistry = mock(TypeRegistry.class);
+    when(cache.getPdxRegistry()).thenReturn(typeRegistry);
     regionMapping = mock(RegionMapping.class);
 
     when(regionMapping.getRegionName()).thenReturn(REGION_NAME);
@@ -170,13 +172,89 @@ public class CreateMappingPreconditionCheckFunctionTest {
     assertThat(fieldsMappings).hasSize(2);
     assertThat(fieldsMappings.get(0))
         .isEqualTo(
-            new FieldMapping("col1", FieldType.DATE.name(), "col1", JDBCType.DATE.name(), false));
+            new FieldMapping("", "", "col1", JDBCType.DATE.name(), false));
     assertThat(fieldsMappings.get(1))
         .isEqualTo(
-            new FieldMapping("col2", FieldType.OBJECT.name(), "col2", JDBCType.DATE.name(), true));
-    verify(pdxInstanceFactory).writeField("col1", null, FieldType.DATE.getFieldClass());
-    verify(pdxInstanceFactory).writeField("col2", null, FieldType.OBJECT.getFieldClass());
-    verify(pdxInstanceFactory).create();
+            new FieldMapping("", "", "col2", JDBCType.DATE.name(), true));
+  }
+
+  @Test
+  public void executeFunctionReturnsFieldMappingsThatMatchTableMetaDataAndExistingPdxType()
+      throws Exception {
+    Set<String> columnNames = new LinkedHashSet<>(Arrays.asList("col1", "col2"));
+    when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
+    when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
+    when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
+    when(tableMetaDataView.isColumnNullable("col2")).thenReturn(true);
+    when(tableMetaDataView.getColumnDataType("col2")).thenReturn(JDBCType.DATE);
+    PdxType pdxType = mock(PdxType.class);
+    PdxField pdxField1 = mock(PdxField.class);
+    when(pdxField1.getFieldName()).thenReturn("pdxField1");
+    when(pdxField1.getFieldType()).thenReturn(FieldType.LONG);
+    when(pdxType.getPdxField("col1")).thenReturn(pdxField1);
+    Set<PdxType> pdxTypes = new HashSet<>(Arrays.asList(pdxType));
+    when(typeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME)).thenReturn(pdxTypes);
+
+    CliFunctionResult result = function.executeFunction(context);
+
+    assertThat(result.isSuccessful()).isTrue();
+    Object[] outputs = (Object[]) result.getResultObject();
+    ArrayList<FieldMapping> fieldsMappings = (ArrayList<FieldMapping>) outputs[1];
+    assertThat(fieldsMappings).hasSize(2);
+    assertThat(fieldsMappings.get(0))
+        .isEqualTo(
+            new FieldMapping("pdxField1", FieldType.LONG.name(), "col1", JDBCType.DATE.name(),
+                false));
+    assertThat(fieldsMappings.get(1))
+        .isEqualTo(
+            new FieldMapping("", "", "col2", JDBCType.DATE.name(), true));
+  }
+
+  @Test
+  public void executeFunctionReturnsFieldMappingsThatMatchTableMetaDataAndExistingPdxTypeWithInexactMatch()
+      throws Exception {
+    Set<String> columnNames = new LinkedHashSet<>(Arrays.asList("col1"));
+    when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
+    when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
+    when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
+    PdxType pdxType = mock(PdxType.class);
+    PdxField pdxField1 = mock(PdxField.class);
+    when(pdxField1.getFieldName()).thenReturn("COL1");
+    when(pdxField1.getFieldType()).thenReturn(FieldType.LONG);
+    when(pdxType.getPdxField("col1")).thenReturn(null);
+    when(pdxType.getFieldNames()).thenReturn(Arrays.asList("someOtherField", "COL1"));
+    when(pdxType.getPdxField("COL1")).thenReturn(pdxField1);
+    Set<PdxType> pdxTypes = new HashSet<>(Arrays.asList(pdxType));
+    when(typeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME)).thenReturn(pdxTypes);
+
+    CliFunctionResult result = function.executeFunction(context);
+
+    assertThat(result.isSuccessful()).isTrue();
+    Object[] outputs = (Object[]) result.getResultObject();
+    ArrayList<FieldMapping> fieldsMappings = (ArrayList<FieldMapping>) outputs[1];
+    assertThat(fieldsMappings).hasSize(1);
+    assertThat(fieldsMappings.get(0))
+        .isEqualTo(
+            new FieldMapping("COL1", FieldType.LONG.name(), "col1", JDBCType.DATE.name(), false));
+  }
+
+  @Test
+  public void executeFunctionThrowsGivenExistingPdxTypeWithMultipleInexactMatches()
+      throws Exception {
+    Set<String> columnNames = new LinkedHashSet<>(Arrays.asList("col1"));
+    when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
+    when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
+    when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
+    PdxType pdxType = mock(PdxType.class);
+    when(pdxType.getFieldNames()).thenReturn(Arrays.asList("Col1", "COL1"));
+    Set<PdxType> pdxTypes = new HashSet<>(Arrays.asList(pdxType));
+    when(typeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME)).thenReturn(pdxTypes);
+
+    Throwable throwable = catchThrowable(() -> function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage("Could not determine what pdx field to use for the column name col1"
+            + " because the pdx fields Col1, COL1 all match it.");
   }
 
   @Test
@@ -225,94 +303,4 @@ public class CreateMappingPreconditionCheckFunctionTest {
     Object[] outputs = (Object[]) result.getResultObject();
     assertThat(outputs[0]).isEqualTo("keyCol1");
   }
-
-  @Test
-  public void computeFieldTypeTest() {
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.BOOLEAN))
-        .isEqualTo(FieldType.BOOLEAN);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.BOOLEAN))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.BIT))
-        .isEqualTo(FieldType.BOOLEAN);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.BIT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.TINYINT))
-        .isEqualTo(FieldType.SHORT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.TINYINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.SMALLINT))
-        .isEqualTo(FieldType.SHORT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.SMALLINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.INTEGER))
-        .isEqualTo(FieldType.INT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.INTEGER))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.BIGINT))
-        .isEqualTo(FieldType.LONG);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.BIGINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.REAL))
-        .isEqualTo(FieldType.FLOAT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.REAL))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.FLOAT))
-        .isEqualTo(FieldType.DOUBLE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.FLOAT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.DOUBLE))
-        .isEqualTo(FieldType.DOUBLE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.DOUBLE))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.DATE))
-        .isEqualTo(FieldType.DATE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.DATE))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.TIME))
-        .isEqualTo(FieldType.DATE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.TIME))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.TIMESTAMP))
-        .isEqualTo(FieldType.DATE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.TIMESTAMP))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(
-        CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.TIME_WITH_TIMEZONE))
-            .isEqualTo(FieldType.DATE);
-    assertThat(
-        CreateMappingPreconditionCheckFunction.computeFieldType(true, JDBCType.TIME_WITH_TIMEZONE))
-            .isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false,
-        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.DATE);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(true,
-        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.OBJECT);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.CHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.VARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.LONGVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.NCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.NVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(
-        CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.LONGNVARCHAR))
-            .isEqualTo(FieldType.STRING);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.BLOB))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.BINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.VARBINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(
-        CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.LONGVARBINARY))
-            .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.ROWID))
-        .isEqualTo(FieldType.OBJECT);
-    Throwable throwable = catchThrowable(
-        () -> CreateMappingPreconditionCheckFunction.computeFieldType(false, JDBCType.NULL));
-    assertThat(throwable).isInstanceOf(IllegalStateException.class);
-  }
-
 }