You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@geode.apache.org by ds...@apache.org on 2019/01/19 01:44:51 UTC
[geode] branch feature/GEODE-6291 updated: added
CreateMappingPreconditionCheckFunction. It is still WIP
This is an automated email from the ASF dual-hosted git repository.
dschneider pushed a commit to branch feature/GEODE-6291
in repository https://gitbox.apache.org/repos/asf/geode.git
The following commit(s) were added to refs/heads/feature/GEODE-6291 by this push:
new ddd8831 added CreateMappingPreconditionCheckFunction. It is still WIP
ddd8831 is described below
commit ddd88314f4c9e6cf7b12d90e0e74a4bcccd44ac9
Author: Darrel Schneider <ds...@pivotal.io>
AuthorDate: Fri Jan 18 17:43:56 2019 -0800
added CreateMappingPreconditionCheckFunction. It is still WIP
---
.../jdbc/internal/cli/CreateMappingCommand.java | 26 ++++---
.../CreateMappingPreconditionCheckFunction.java | 82 ++++++++++++++++++++++
.../jdbc/internal/configuration/RegionMapping.java | 4 ++
.../internal/cli/CreateMappingCommandTest.java | 46 +++++++++++-
.../main/java/org/apache/geode/pdx/FieldType.java | 68 ++++++++++++------
5 files changed, 191 insertions(+), 35 deletions(-)
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
index 1f9d475..3010ba8 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
@@ -16,6 +16,7 @@ package org.apache.geode.connectors.jdbc.internal.cli;
+import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@@ -33,6 +34,7 @@ import org.apache.geode.cache.configuration.RegionConfig;
import org.apache.geode.connectors.jdbc.JdbcAsyncWriter;
import org.apache.geode.connectors.jdbc.JdbcLoader;
import org.apache.geode.connectors.jdbc.JdbcWriter;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
import org.apache.geode.connectors.util.internal.MappingConstants;
import org.apache.geode.distributed.ConfigurationPersistenceService;
@@ -125,17 +127,19 @@ public class CreateMappingCommand extends SingleGfshCommand {
return ResultModel.createError(ex.getMessage());
}
- // Do the following on one and only one targetMember:
- // 1. connect to the database (using dataSourceName)
- // 2. read the table meta data
- // 3. generates a pdx type using PdxInstanceFactory with null field values
- // 4. return the field mapping info
- // The field mapping info will be passed to CreateMappingFunction
- // The field mapping will have the following:
- // 1. pdx field name
- // 2. pdx field type (an instance of org.apache.geode.pdx.FieldType)
- // 3. jdbc column name
- // 4. jdbc column type (an instance of java.sql.JDBCType)
+ CliFunctionResult preconditionCheckResult =
+ executeFunctionAndGetFunctionResult(new CreateMappingPreconditionCheckFunction(), mapping,
+ targetMembers.iterator().next());
+ if (preconditionCheckResult.isSuccessful()) {
+ ArrayList<FieldMapping> fieldMappings =
+ (ArrayList<FieldMapping>) preconditionCheckResult.getResultObject();
+ for (FieldMapping fieldMapping : fieldMappings) {
+ mapping.addFieldMapping(fieldMapping);
+ }
+ } else {
+ String message = preconditionCheckResult.getStatusMessage();
+ return ResultModel.createError(message);
+ }
// action
Object[] arguments = new Object[] {mapping, synchronous};
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
new file mode 100644
index 0000000..53de411
--- /dev/null
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.connectors.jdbc.internal.cli;
+
+import java.sql.Connection;
+import java.sql.JDBCType;
+import java.util.ArrayList;
+
+import javax.sql.DataSource;
+
+import org.apache.geode.annotations.Experimental;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.connectors.jdbc.JdbcConnectorException;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
+import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
+import org.apache.geode.internal.jndi.JNDIInvoker;
+import org.apache.geode.management.cli.CliFunction;
+import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
+import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.PdxInstance;
+import org.apache.geode.pdx.PdxInstanceFactory;
+
+@Experimental
+public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMapping> {
+
+ CreateMappingPreconditionCheckFunction() {
+ super();
+ }
+
+ @Override
+ public CliFunctionResult executeFunction(FunctionContext<RegionMapping> context)
+ throws Exception {
+ RegionMapping regionMapping = context.getArguments();
+ String dataSourceName = regionMapping.getDataSourceName();
+ DataSource dataSource = JNDIInvoker.getDataSource(dataSourceName);
+ if (dataSource == null) {
+ throw new JdbcConnectorException("JDBC data-source named \"" + dataSourceName
+ + "\" not found. Create it with gfsh 'create data-source --pooled --name="
+ + dataSourceName + "'.");
+ }
+ try (Connection connection = dataSource.getConnection()) {
+ TableMetaDataView tableMetaData =
+ new TableMetaDataManager().getTableMetaDataView(connection, regionMapping);
+ PdxInstanceFactory pdxInstanceFactory =
+ context.getCache().createPdxInstanceFactory(regionMapping.getPdxName());
+ ArrayList<FieldMapping> fieldMappings = new ArrayList<>();
+ for (String jdbcName : tableMetaData.getColumnNames()) {
+ boolean isNullable = tableMetaData.isColumnNullable(jdbcName);
+ JDBCType jdbcType = tableMetaData.getColumnDataType(jdbcName);
+ String pdxName = jdbcName;
+ FieldType pdxType = computeFieldType(isNullable, jdbcType);
+ pdxInstanceFactory.writeField(pdxName, null, pdxType.getFieldClass());
+ fieldMappings.add(new FieldMapping(pdxName, pdxType.name(), jdbcName, jdbcType.getName()));
+ }
+ PdxInstance pdxInstance = pdxInstanceFactory.create();
+ // TODO look for existing PdxType in the registry whose names differ in case
+
+ String member = context.getMemberName();
+ return new CliFunctionResult(member, fieldMappings);
+ }
+ }
+
+ private FieldType computeFieldType(boolean isNullable, JDBCType jdbcType) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+}
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/RegionMapping.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/RegionMapping.java
index f80606a..342945d 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/RegionMapping.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/configuration/RegionMapping.java
@@ -170,6 +170,10 @@ public class RegionMapping implements CacheElement {
return tableName;
}
+ public List<FieldMapping> getFieldMappings() {
+ return this.fieldMappings;
+ }
+
public void addFieldMapping(FieldMapping value) {
this.fieldMappings.add(value);
this.pdxToFieldMappings.put(value.getPdxName(), value);
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
index 2d66fbf..46dd01d 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
@@ -41,6 +41,7 @@ import org.apache.geode.cache.configuration.RegionAttributesDataPolicy;
import org.apache.geode.cache.configuration.RegionAttributesType;
import org.apache.geode.cache.configuration.RegionConfig;
import org.apache.geode.connectors.jdbc.JdbcAsyncWriter;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
import org.apache.geode.distributed.ConfigurationPersistenceService;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
@@ -59,6 +60,8 @@ public class CreateMappingCommandTest {
private String tableName;
private String pdxClass;
private Set<InternalDistributedMember> members;
+ private CliFunctionResult preconditionCheckResults;
+ private ArrayList<FieldMapping> fieldMappings = new ArrayList<>();
private List<CliFunctionResult> results;
private CliFunctionResult successFunctionResult;
private RegionMapping mapping;
@@ -81,7 +84,11 @@ public class CreateMappingCommandTest {
results = new ArrayList<>();
successFunctionResult = mock(CliFunctionResult.class);
when(successFunctionResult.isSuccessful()).thenReturn(true);
-
+ preconditionCheckResults = mock(CliFunctionResult.class);
+ when(preconditionCheckResults.isSuccessful()).thenReturn(true);
+ when(preconditionCheckResults.getResultObject()).thenReturn(fieldMappings);
+ doReturn(preconditionCheckResults).when(createRegionMappingCommand)
+ .executeFunctionAndGetFunctionResult(any(), any(), any());
doReturn(results).when(createRegionMappingCommand).executeAndGetFunctionResult(any(), any(),
any());
doReturn(members).when(createRegionMappingCommand).findMembersForRegion(regionName);
@@ -135,10 +142,47 @@ public class CreateMappingCommandTest {
assertThat(regionMapping.getIds()).isEqualTo(ids);
assertThat(regionMapping.getCatalog()).isEqualTo(catalog);
assertThat(regionMapping.getSchema()).isEqualTo(schema);
+ assertThat(regionMapping.getFieldMappings()).isEmpty();
assertThat(synchronous).isFalse();
}
@Test
+ public void createsMappingReturnsCorrectFieldMappings() {
+ setupRequiredPreconditions();
+ results.add(successFunctionResult);
+ String ids = "ids";
+ String catalog = "catalog";
+ String schema = "schema";
+ this.fieldMappings.add(new FieldMapping("pdx1", "pdx1type", "jdbc1", "jdbc1type"));
+ this.fieldMappings.add(new FieldMapping("pdx2", "pdx2type", "jdbc2", "jdbc2type"));
+
+ ResultModel result = createRegionMappingCommand.createMapping(regionName, dataSourceName,
+ tableName, pdxClass, false, ids, catalog, schema);
+
+ assertThat(result.getStatus()).isSameAs(Result.Status.OK);
+ Object[] results = (Object[]) result.getConfigObject();
+ RegionMapping regionMapping = (RegionMapping) results[0];
+ assertThat(regionMapping.getFieldMappings()).isEqualTo(this.fieldMappings);
+ }
+
+ @Test
+ public void createsMappingReturnsErrorIfPreconditionCheckErrors() {
+ setupRequiredPreconditions();
+ results.add(successFunctionResult);
+ String ids = "ids";
+ String catalog = "catalog";
+ String schema = "schema";
+ when(preconditionCheckResults.isSuccessful()).thenReturn(false);
+ when(preconditionCheckResults.getStatusMessage()).thenReturn("precondition check failed");
+
+ ResultModel result = createRegionMappingCommand.createMapping(regionName, dataSourceName,
+ tableName, pdxClass, false, ids, catalog, schema);
+
+ assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
+ assertThat(result.toString()).contains("precondition check failed");
+ }
+
+ @Test
public void createsMappingWithRegionPathCreatesMappingWithSlashRemoved() {
setupRequiredPreconditions();
results.add(successFunctionResult);
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/FieldType.java b/geode-core/src/main/java/org/apache/geode/pdx/FieldType.java
index 5c56c86..c8bee22 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/FieldType.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/FieldType.java
@@ -31,28 +31,40 @@ import org.apache.geode.pdx.internal.DataSize;
* @since GemFire 6.6.2
*/
public enum FieldType {
- BOOLEAN(true, DataSize.BOOLEAN_SIZE, "boolean", new byte[] {0}, false),
- BYTE(true, DataSize.BYTE_SIZE, "byte", new byte[] {0}, 0),
- CHAR(true, DataSize.CHAR_SIZE, "char", new byte[] {0, 0}, (char) 0),
- SHORT(true, DataSize.SHORT_SIZE, "short", new byte[] {0, 0}, 0),
- INT(true, DataSize.INTEGER_SIZE, "int", new byte[] {0, 0, 0, 0}, 0),
- LONG(true, DataSize.LONG_SIZE, "long", new byte[] {0, 0, 0, 0, 0, 0, 0, 0}, 0),
- FLOAT(true, DataSize.FLOAT_SIZE, "float", new byte[] {0, 0, 0, 0}, 0),
- DOUBLE(true, DataSize.DOUBLE_SIZE, "double", new byte[] {0, 0, 0, 0, 0, 0, 0, 0}, 0),
- DATE(true, DataSize.DATE_SIZE, "Date", new byte[] {-1, -1, -1, -1, -1, -1, -1, -1}, null),
- STRING(false, -1, "String", new byte[] {DSCODE.NULL_STRING.toByte()}, null),
- OBJECT(false, -1, "Object", new byte[] {DSCODE.NULL.toByte()}, null),
- BOOLEAN_ARRAY(false, -1, "boolean[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- CHAR_ARRAY(false, -1, "char[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- BYTE_ARRAY(false, -1, "byte[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- SHORT_ARRAY(false, -1, "short[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- INT_ARRAY(false, -1, "int[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- LONG_ARRAY(false, -1, "long[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- FLOAT_ARRAY(false, -1, "float[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- DOUBLE_ARRAY(false, -1, "double[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- STRING_ARRAY(false, -1, "String[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- OBJECT_ARRAY(false, -1, "Object[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null),
- ARRAY_OF_BYTE_ARRAYS(false, -1, "byte[][]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null);
+ BOOLEAN(true, DataSize.BOOLEAN_SIZE, "boolean", new byte[] {0}, false, boolean.class),
+ BYTE(true, DataSize.BYTE_SIZE, "byte", new byte[] {0}, 0, byte.class),
+ CHAR(true, DataSize.CHAR_SIZE, "char", new byte[] {0, 0}, (char) 0, char.class),
+ SHORT(true, DataSize.SHORT_SIZE, "short", new byte[] {0, 0}, 0, short.class),
+ INT(true, DataSize.INTEGER_SIZE, "int", new byte[] {0, 0, 0, 0}, 0, int.class),
+ LONG(true, DataSize.LONG_SIZE, "long", new byte[] {0, 0, 0, 0, 0, 0, 0, 0}, 0, long.class),
+ FLOAT(true, DataSize.FLOAT_SIZE, "float", new byte[] {0, 0, 0, 0}, 0, float.class),
+ DOUBLE(true, DataSize.DOUBLE_SIZE, "double", new byte[] {0, 0, 0, 0, 0, 0, 0, 0}, 0,
+ double.class),
+ DATE(true, DataSize.DATE_SIZE, "Date", new byte[] {-1, -1, -1, -1, -1, -1, -1, -1}, null,
+ Date.class),
+ STRING(false, -1, "String", new byte[] {DSCODE.NULL_STRING.toByte()}, null, String.class),
+ OBJECT(false, -1, "Object", new byte[] {DSCODE.NULL.toByte()}, null, Object.class),
+ BOOLEAN_ARRAY(false, -1, "boolean[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ boolean[].class),
+ CHAR_ARRAY(false, -1, "char[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ char[].class),
+ BYTE_ARRAY(false, -1, "byte[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ byte[].class),
+ SHORT_ARRAY(false, -1, "short[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ short[].class),
+ INT_ARRAY(false, -1, "int[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null, int[].class),
+ LONG_ARRAY(false, -1, "long[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ long[].class),
+ FLOAT_ARRAY(false, -1, "float[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ float[].class),
+ DOUBLE_ARRAY(false, -1, "double[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ double[].class),
+ STRING_ARRAY(false, -1, "String[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ String[].class),
+ OBJECT_ARRAY(false, -1, "Object[]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ Object[].class),
+ ARRAY_OF_BYTE_ARRAYS(false, -1, "byte[][]", new byte[] {InternalDataSerializer.NULL_ARRAY}, null,
+ byte[][].class);
private final boolean isFixedWidth;
/**
@@ -62,14 +74,16 @@ public enum FieldType {
private final String name;
private final ByteBuffer defaultSerializedValue;
private final Object defaultValue;
+ private final Class<?> fieldClass;
private FieldType(boolean isFixedWidth, int width, String name, byte[] defaultBytes,
- Object defaultValue) {
+ Object defaultValue, Class<?> fieldClass) {
this.isFixedWidth = isFixedWidth;
this.width = width;
this.name = name;
this.defaultSerializedValue = ByteBuffer.wrap(defaultBytes).asReadOnlyBuffer();
this.defaultValue = defaultValue;
+ this.fieldClass = fieldClass;
}
/**
@@ -100,6 +114,14 @@ public enum FieldType {
}
/**
+ * @return the Class that corresponds to this FieldType.
+ * @since Geode 1.9
+ */
+ public Class<?> getFieldClass() {
+ return this.fieldClass;
+ }
+
+ /**
* Given a Class return the corresponding FieldType.
*/
public static FieldType get(Class<?> c) {