Posted to commits@metamodel.apache.org by ka...@apache.org on 2017/07/22 17:06:36 UTC

metamodel git commit: METAMODEL-1145: Fixed

Repository: metamodel
Updated Branches:
  refs/heads/master 6f9e09449 -> 018359172


METAMODEL-1145: Fixed

Closes #149

Project: http://git-wip-us.apache.org/repos/asf/metamodel/repo
Commit: http://git-wip-us.apache.org/repos/asf/metamodel/commit/01835917
Tree: http://git-wip-us.apache.org/repos/asf/metamodel/tree/01835917
Diff: http://git-wip-us.apache.org/repos/asf/metamodel/diff/01835917

Branch: refs/heads/master
Commit: 018359172caf0fdb51336fdba3e87fcace741e05
Parents: 6f9e094
Author: Joerg Unbehauen <un...@informatik.uni-leipzig.de>
Authored: Sat Jul 22 10:06:34 2017 -0700
Committer: Kasper Sørensen <i....@gmail.com>
Committed: Sat Jul 22 10:06:34 2017 -0700

----------------------------------------------------------------------
 CHANGES.md                                      |   1 +
 .../metamodel/jdbc/JdbcMetadataLoader.java      |  92 ++++++++++++----
 .../apache/metamodel/jdbc/H2databaseTest.java   | 104 +++++++++++++------
 3 files changed, 142 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/metamodel/blob/01835917/CHANGES.md
----------------------------------------------------------------------
diff --git a/CHANGES.md b/CHANGES.md
index a468ab1..1b8361c 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -7,6 +7,7 @@
  * [METAMODEL-1139] - Employed Java 8 functional types (java.util.function) in favor of (now deprecated) Ref, Action, Func. 
  * [METAMODEL-1140] - Allowed SalesforceDataContext without a security token.
  * [METAMODEL-1141] - Added RFC 4180 compliant CSV parsing.
+ * [METAMODEL-1145] - Fixed bug with modelling JDBC table relationships when there are multiple keys involved in the relationship.
 
 ### Apache MetaModel 4.6.0
 

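Background for the CHANGES.md entry above: JDBC's DatabaseMetaData.getImportedKeys(...) reports a composite (multi-column) foreign key as one row per column pair. For the PARENT/CHILD tables used in the new H2 test further down, the (abbreviated, illustrative) rows look roughly like this:

    PKTABLE_NAME  PKCOLUMN_NAME  FKTABLE_NAME  FKCOLUMN_NAME  KEY_SEQ
    PARENT        P1             CHILD         CP1            1
    PARENT        P2             CHILD         CP2            2
    PARENT        P3             CHILD         CP3            3
    PARENT        P4             CHILD         CP4            4

Before this change the loader created a separate single-column Relationship for each such row; the updated JdbcMetadataLoader below groups the rows per (PK table, FK table) pair and creates one Relationship carrying all column pairs.
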
http://git-wip-us.apache.org/repos/asf/metamodel/blob/01835917/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcMetadataLoader.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcMetadataLoader.java b/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcMetadataLoader.java
index 837fb18..2c29405 100644
--- a/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcMetadataLoader.java
+++ b/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcMetadataLoader.java
@@ -22,7 +22,12 @@ import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.StringTokenizer;
 import java.util.concurrent.ConcurrentHashMap;
@@ -58,7 +63,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
     private final Set<Integer> _loadedIndexes;
     private final Set<Integer> _loadedPrimaryKeys;
 
-    public JdbcMetadataLoader(JdbcDataContext dataContext, boolean usesCatalogsAsSchemas, String identifierQuoteString) {
+    public JdbcMetadataLoader(JdbcDataContext dataContext, boolean usesCatalogsAsSchemas,
+            String identifierQuoteString) {
         _dataContext = dataContext;
         _usesCatalogsAsSchemas = usesCatalogsAsSchemas;
         _identifierQuoteString = identifierQuoteString;
@@ -92,7 +98,7 @@ final class JdbcMetadataLoader implements MetadataLoader {
     }
 
     private String getJdbcSchemaName(Schema schema) {
-        if(_usesCatalogsAsSchemas) {
+        if (_usesCatalogsAsSchemas) {
             return null;
         } else {
             return schema.getName();
@@ -100,7 +106,7 @@ final class JdbcMetadataLoader implements MetadataLoader {
     }
 
     private String getCatalogName(Schema schema) {
-        if(_usesCatalogsAsSchemas) {
+        if (_usesCatalogsAsSchemas) {
             return schema.getName();
         } else {
             return _dataContext.getCatalogName();
@@ -109,8 +115,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
 
     private void loadTables(JdbcSchema schema, DatabaseMetaData metaData, String[] types) {
         try (ResultSet rs = metaData.getTables(getCatalogName(schema), getJdbcSchemaName(schema), null, types)) {
-            logger.debug("Querying for table types {}, in catalog: {}, schema: {}", types,
-                    _dataContext.getCatalogName(), schema.getName());
+            logger.debug("Querying for table types {}, in catalog: {}, schema: {}", types, _dataContext
+                    .getCatalogName(), schema.getName());
 
             schema.clearTables();
             int tableNumber = -1;
@@ -138,15 +144,15 @@ final class JdbcMetadataLoader implements MetadataLoader {
             if (tablesReturned == 0) {
                 logger.info("No table metadata records returned for schema '{}'", schema.getName());
             } else {
-                logger.debug("Returned {} table metadata records for schema '{}'", new Object[] { tablesReturned,
-                        schema.getName() });
+                logger.debug("Returned {} table metadata records for schema '{}'", new Object[] { tablesReturned, schema
+                        .getName() });
             }
 
         } catch (SQLException e) {
             throw JdbcUtils.wrapException(e, "retrieve table metadata for " + schema.getName());
         }
     }
-    
+
     @Override
     public void loadIndexes(JdbcTable jdbcTable) {
         final int identity = System.identityHashCode(jdbcTable);
@@ -182,7 +188,7 @@ final class JdbcMetadataLoader implements MetadataLoader {
             }
         }
     }
-    
+
     @Override
     public void loadPrimaryKeys(JdbcTable jdbcTable) {
         final int identity = System.identityHashCode(jdbcTable);
@@ -220,7 +226,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
 
     private void loadPrimaryKeys(JdbcTable table, DatabaseMetaData metaData) throws MetaModelException {
         Schema schema = table.getSchema();
-        try (ResultSet rs = metaData.getPrimaryKeys(getCatalogName(schema), getJdbcSchemaName(schema), table.getName());){
+        try (ResultSet rs = metaData.getPrimaryKeys(getCatalogName(schema), getJdbcSchemaName(schema), table
+                .getName());) {
             while (rs.next()) {
                 String columnName = rs.getString(4);
                 if (columnName != null) {
@@ -242,7 +249,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
 
         // Ticket #170: IndexInfo is nice-to-have, not need-to-have, so
         // we will do a nice failover on SQLExceptions
-        try (ResultSet rs = metaData.getIndexInfo(getCatalogName(schema), getJdbcSchemaName(schema), table.getName(), false, true)) {
+        try (ResultSet rs = metaData.getIndexInfo(getCatalogName(schema), getJdbcSchemaName(schema), table.getName(),
+                false, true)) {
             while (rs.next()) {
                 String columnName = rs.getString(9);
                 if (columnName != null) {
@@ -258,7 +266,7 @@ final class JdbcMetadataLoader implements MetadataLoader {
             throw JdbcUtils.wrapException(e, "retrieve index information for " + table.getName());
         }
     }
-    
+
     @Override
     public void loadColumns(JdbcTable jdbcTable) {
         final int identity = System.identityHashCode(jdbcTable);
@@ -309,7 +317,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
         final boolean convertLobs = isLobConversionEnabled();
         final Schema schema = table.getSchema();
 
-        try (ResultSet rs = metaData.getColumns(getCatalogName(schema), getJdbcSchemaName(schema), table.getName(), null)) {
+        try (ResultSet rs = metaData.getColumns(getCatalogName(schema), getJdbcSchemaName(schema), table.getName(),
+                null)) {
             if (logger.isDebugEnabled()) {
                 logger.debug("Querying for columns in table: " + table.getName());
             }
@@ -327,8 +336,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
                 final Integer columnSize = rs.getInt(7);
 
                 if (logger.isDebugEnabled()) {
-                    logger.debug("Found column: table=" + table.getName() + ",columnName=" + columnName
-                            + ",nativeType=" + nativeType + ",columnSize=" + columnSize);
+                    logger.debug("Found column: table=" + table.getName() + ",columnName=" + columnName + ",nativeType="
+                            + nativeType + ",columnSize=" + columnSize);
                 }
 
                 ColumnType columnType = _dataContext.getQueryRewriter().getColumnType(jdbcType, nativeType, columnSize);
@@ -362,17 +371,17 @@ final class JdbcMetadataLoader implements MetadataLoader {
 
             final int columnsReturned = columnNumber + 1;
             if (columnsReturned == 0) {
-                logger.info("No column metadata records returned for table '{}' in schema '{}'", table.getName(),
-                        schema.getName());
+                logger.info("No column metadata records returned for table '{}' in schema '{}'", table.getName(), schema
+                        .getName());
             } else {
-                logger.debug("Returned {} column metadata records for table '{}' in schema '{}'", columnsReturned,
-                        table.getName(), schema.getName());
+                logger.debug("Returned {} column metadata records for table '{}' in schema '{}'", columnsReturned, table
+                        .getName(), schema.getName());
             }
         } catch (SQLException e) {
             throw JdbcUtils.wrapException(e, "retrieve table metadata for " + table.getName());
         }
     }
-    
+
     @Override
     public void loadRelations(JdbcSchema jdbcSchema) {
         final int identity = System.identityHashCode(jdbcSchema);
@@ -412,7 +421,8 @@ final class JdbcMetadataLoader implements MetadataLoader {
 
     private void loadRelations(Table table, DatabaseMetaData metaData) {
         Schema schema = table.getSchema();
-        try (ResultSet rs = metaData.getImportedKeys(getCatalogName(schema), getJdbcSchemaName(schema), table.getName())) {
+        try (ResultSet rs = metaData.getImportedKeys(getCatalogName(schema), getJdbcSchemaName(schema), table
+                .getName())) {
             loadRelations(rs, schema);
         } catch (SQLException e) {
             throw JdbcUtils.wrapException(e, "retrieve imported keys for " + table.getName());
@@ -420,7 +430,12 @@ final class JdbcMetadataLoader implements MetadataLoader {
     }
 
     private void loadRelations(ResultSet rs, Schema schema) throws SQLException {
+        // by using nested maps, we can associate a list of pk/fk columns with
+        // the tables they belong to
+        // the result set comes flattened out.
+        Map<Table, Map<Table, ColumnsTuple>> relations = new HashMap<>();
         while (rs.next()) {
+
             String pkTableName = rs.getString(3);
             String pkColumnName = rs.getString(4);
 
@@ -453,9 +468,42 @@ final class JdbcMetadataLoader implements MetadataLoader {
                 logger.error("pkColumn={}", pkColumn);
                 logger.error("fkColumn={}", fkColumn);
             } else {
-                MutableRelationship.createRelationship(new Column[] { pkColumn }, new Column[] { fkColumn });
+
+                if (!relations.containsKey(pkTable)) {
+                    relations.put(pkTable, new HashMap<>());
+                }
+
+                // get or init the columns tuple
+                ColumnsTuple ct = relations.get(pkTable).get(fkTable);
+                if (Objects.isNull(ct)) {
+                    ct = new ColumnsTuple();
+                    relations.get(pkTable).put(fkTable, ct);
+                }
+                // we can now safely add the columns
+                ct.getPkCols().add(pkColumn);
+                ct.getFkCols().add(fkColumn);
             }
         }
+
+        relations.values().stream().flatMap(map -> map.values().stream()).forEach(ct -> MutableRelationship
+                .createRelationship(ct.getPkCols().toArray(new Column[0]), ct.getFkCols().toArray(new Column[0])));
+    }
+
+    /**
+     * Represents the columns of a relationship while it is being built from a
+     * {@link ResultSet}.
+     */
+    private static class ColumnsTuple {
+        private final List<Column> pkCols = new ArrayList<>();
+        private final List<Column> fkCols = new ArrayList<>();
+
+        public List<Column> getFkCols() {
+            return fkCols;
+        }
+
+        public List<Column> getPkCols() {
+            return pkCols;
+        }
     }
 
 }
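
A minimal, standalone sketch of the grouping idea used in loadRelations above, with plain Strings standing in for MetaModel's Table and Column types. The class and KeyRow type are hypothetical illustrations, not part of the commit; the real loader works on JdbcTable/Column and finally calls MutableRelationship.createRelationship(pkCols, fkCols) per bucket.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class CompositeKeyGroupingSketch {

        // Hypothetical stand-in for one row of DatabaseMetaData.getImportedKeys():
        // a single (pkColumn, fkColumn) pair belonging to a (pkTable, fkTable) relationship.
        static final class KeyRow {
            final String pkTable, pkColumn, fkTable, fkColumn;
            KeyRow(String pkTable, String pkColumn, String fkTable, String fkColumn) {
                this.pkTable = pkTable;
                this.pkColumn = pkColumn;
                this.fkTable = fkTable;
                this.fkColumn = fkColumn;
            }
        }

        public static void main(String[] args) {
            // Flattened rows as a JDBC driver would return them for the
            // PARENT/CHILD tables used in the H2 test below.
            List<KeyRow> rows = Arrays.asList(
                    new KeyRow("PARENT", "P1", "CHILD", "CP1"),
                    new KeyRow("PARENT", "P2", "CHILD", "CP2"),
                    new KeyRow("PARENT", "P3", "CHILD", "CP3"),
                    new KeyRow("PARENT", "P4", "CHILD", "CP4"));

            // Group the column pairs by (pkTable, fkTable), mirroring the nested
            // Map<Table, Map<Table, ColumnsTuple>> in JdbcMetadataLoader above.
            Map<String, Map<String, List<KeyRow>>> relations = new LinkedHashMap<>();
            for (KeyRow row : rows) {
                relations.computeIfAbsent(row.pkTable, t -> new LinkedHashMap<>())
                        .computeIfAbsent(row.fkTable, t -> new ArrayList<>())
                        .add(row);
            }

            // One relationship per (pkTable, fkTable) pair, carrying all of its
            // column pairs; the real loader creates a MutableRelationship here.
            relations.forEach((pkTable, byFkTable) -> byFkTable.forEach((fkTable, pairs) ->
                    System.out.println(pkTable + " <- " + fkTable + ": " + pairs.size() + " column pair(s)")));
        }
    }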

http://git-wip-us.apache.org/repos/asf/metamodel/blob/01835917/jdbc/src/test/java/org/apache/metamodel/jdbc/H2databaseTest.java
----------------------------------------------------------------------
diff --git a/jdbc/src/test/java/org/apache/metamodel/jdbc/H2databaseTest.java b/jdbc/src/test/java/org/apache/metamodel/jdbc/H2databaseTest.java
index 6e21ae5..d46bc31 100644
--- a/jdbc/src/test/java/org/apache/metamodel/jdbc/H2databaseTest.java
+++ b/jdbc/src/test/java/org/apache/metamodel/jdbc/H2databaseTest.java
@@ -22,6 +22,7 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -41,6 +42,7 @@ import org.apache.metamodel.query.Query;
 import org.apache.metamodel.query.SelectItem;
 import org.apache.metamodel.schema.Column;
 import org.apache.metamodel.schema.ColumnType;
+import org.apache.metamodel.schema.Relationship;
 import org.apache.metamodel.schema.Schema;
 import org.apache.metamodel.schema.Table;
 import org.apache.metamodel.update.Update;
@@ -52,12 +54,12 @@ import junit.framework.TestCase;
  * Test case that tests interaction with the H2 embedded database
  */
 public class H2databaseTest extends TestCase {
-    
+
     public static final String DRIVER_CLASS = "org.h2.Driver";
     public static final String URL_MEMORY_DATABASE = "jdbc:h2:mem:";
 
-    private final String[] FIRST_NAMES = { "Suzy", "Barbara", "John", "Ken", "Billy", "Larry", "Joe", "Margareth", "Bobby",
-            "Elizabeth" };
+    private final String[] FIRST_NAMES = { "Suzy", "Barbara", "John", "Ken", "Billy", "Larry", "Joe", "Margareth",
+            "Bobby", "Elizabeth" };
     private final String[] LAST_NAMES = { "Doe", "Gates", "Jobs", "Ellison", "Trump" };
 
     private Connection conn;
@@ -74,7 +76,7 @@ public class H2databaseTest extends TestCase {
         super.tearDown();
         conn.close();
     }
-    
+
     public void testCreateInsertAndUpdate() throws Exception {
         JdbcDataContext dc = new JdbcDataContext(conn);
         JdbcTestTemplates.simpleCreateInsertUpdateAndDrop(dc, "metamodel_test_simple");
@@ -84,14 +86,14 @@ public class H2databaseTest extends TestCase {
         JdbcDataContext dc = new JdbcDataContext(conn);
         JdbcTestTemplates.compositeKeyCreation(dc, "metamodel_test_composite_keys");
     }
-    
+
     public void testTimestampValueInsertSelect() throws Exception {
         JdbcTestTemplates.timestampValueInsertSelect(conn, TimeUnit.NANOSECONDS);
     }
 
     public void testUsingSingleUpdates() throws Exception {
         final JdbcDataContext dc = new JdbcDataContext(conn);
-        
+
         final Schema schema = dc.getDefaultSchema();
         dc.executeUpdate(new CreateTable(schema, "test_table").withColumn("id").ofType(ColumnType.VARCHAR));
 
@@ -108,7 +110,6 @@ public class H2databaseTest extends TestCase {
         ds.close();
 
         dc.executeUpdate(new DeleteFrom(table).where("id").eq("bar"));
-        
 
         ds = dc.query().from(table).selectCount().execute();
         assertTrue(ds.next());
@@ -117,7 +118,7 @@ public class H2databaseTest extends TestCase {
         ds.close();
 
         dc.executeUpdate(new Update(table).where("id").eq("foo").value("id", "baz"));
-        
+
         ds = dc.query().from(table).selectAll().execute();
         assertTrue(ds.next());
         assertEquals("Row[values=[baz]]", ds.getRow().toString());
@@ -166,7 +167,8 @@ public class H2databaseTest extends TestCase {
 
         Query q = dc.query().from(table).selectCount().and(FunctionType.MAX, ageColumn).and(FunctionType.MIN, ageColumn)
                 .toQuery();
-        assertEquals("SELECT COUNT(*), MAX(\"TEST_TABLE\".\"AGE\"), MIN(\"TEST_TABLE\".\"AGE\") FROM PUBLIC.\"TEST_TABLE\"",
+        assertEquals(
+                "SELECT COUNT(*), MAX(\"TEST_TABLE\".\"AGE\"), MIN(\"TEST_TABLE\".\"AGE\") FROM PUBLIC.\"TEST_TABLE\"",
                 q.toSql());
 
         assertEquals(1, dc.getFetchSizeCalculator().getFetchSize(q));
@@ -182,9 +184,10 @@ public class H2databaseTest extends TestCase {
         int minAge = ((Number) row.getValue(2)).intValue();
         assertTrue("Minimum age was: " + minAge, minAge < 10 && minAge >= 0);
 
-        q = dc.query().from(table).as("t").select(ageColumn).selectCount().where(ageColumn).greaterThan(50).groupBy(ageColumn)
-                .toQuery();
-        assertEquals("SELECT t.\"AGE\", COUNT(*) FROM PUBLIC.\"TEST_TABLE\" t WHERE t.\"AGE\" > 50 GROUP BY t.\"AGE\"", q.toSql());
+        q = dc.query().from(table).as("t").select(ageColumn).selectCount().where(ageColumn).greaterThan(50).groupBy(
+                ageColumn).toQuery();
+        assertEquals("SELECT t.\"AGE\", COUNT(*) FROM PUBLIC.\"TEST_TABLE\" t WHERE t.\"AGE\" > 50 GROUP BY t.\"AGE\"",
+                q.toSql());
 
         ds = dc.executeQuery(q);
         List<Object[]> objectArrays = ds.toObjectArrays();
@@ -232,8 +235,10 @@ public class H2databaseTest extends TestCase {
 
             @Override
             public void run(UpdateCallback cb) {
-                JdbcCreateTableBuilder createTableBuilder = (JdbcCreateTableBuilder) cb.createTable(schema, "test_table");
-                Table writtenTable = createTableBuilder.withColumn("id").asPrimaryKey().ofType(ColumnType.INTEGER).execute();
+                JdbcCreateTableBuilder createTableBuilder = (JdbcCreateTableBuilder) cb.createTable(schema,
+                        "test_table");
+                Table writtenTable = createTableBuilder.withColumn("id").asPrimaryKey().ofType(ColumnType.INTEGER)
+                        .execute();
 
                 for (int i = 0; i < 10; i++) {
                     cb.insertInto(writtenTable).value("id", i + 1).execute();
@@ -276,12 +281,15 @@ public class H2databaseTest extends TestCase {
 
             @Override
             public void run(UpdateCallback cb) {
-                JdbcCreateTableBuilder createTableBuilder = (JdbcCreateTableBuilder) cb.createTable(schema, "test_table");
+                JdbcCreateTableBuilder createTableBuilder = (JdbcCreateTableBuilder) cb.createTable(schema,
+                        "test_table");
                 Table writtenTable = createTableBuilder.withColumn("id").asPrimaryKey().ofType(ColumnType.INTEGER)
-                        .withColumn("name").ofSize(255).ofType(ColumnType.VARCHAR).withColumn("age").ofType(ColumnType.INTEGER)
-                        .execute();
+                        .withColumn("name").ofSize(255).ofType(ColumnType.VARCHAR).withColumn("age").ofType(
+                                ColumnType.INTEGER).execute();
                 String sql = createTableBuilder.createSqlStatement();
-                assertEquals("CREATE TABLE PUBLIC.test_table (id INTEGER, name VARCHAR(255), age INTEGER, PRIMARY KEY(id))", sql);
+                assertEquals(
+                        "CREATE TABLE PUBLIC.test_table (id INTEGER, name VARCHAR(255), age INTEGER, PRIMARY KEY(id))",
+                        sql);
                 assertNotNull(writtenTable);
                 assertEquals("[ID, NAME, AGE]", Arrays.toString(writtenTable.getColumnNames()));
 
@@ -310,11 +318,13 @@ public class H2databaseTest extends TestCase {
             @Override
             public void run(UpdateCallback cb) {
                 cb.insertInto(writtenTableRef.get()).value("age", 14).value("name", "hello").value("id", 1).execute();
-                JdbcInsertBuilder insertBuilder = (JdbcInsertBuilder) cb.insertInto(writtenTableRef.get()).value("age", 15)
-                        .value("name", "wor'ld").value("id", 2);
-                assertEquals("INSERT INTO PUBLIC.\"TEST_TABLE\" (ID,NAME,AGE) VALUES (?,?,?)", insertBuilder.createSqlStatement());
+                JdbcInsertBuilder insertBuilder = (JdbcInsertBuilder) cb.insertInto(writtenTableRef.get()).value("age",
+                        15).value("name", "wor'ld").value("id", 2);
+                assertEquals("INSERT INTO PUBLIC.\"TEST_TABLE\" (ID,NAME,AGE) VALUES (?,?,?)", insertBuilder
+                        .createSqlStatement());
                 insertBuilder.execute();
-                cb.insertInto(writtenTableRef.get()).value("age", 16).value("name", "escobar!").value("id", 3).execute();
+                cb.insertInto(writtenTableRef.get()).value("age", 16).value("name", "escobar!").value("id", 3)
+                        .execute();
             }
         });
 
@@ -331,10 +341,10 @@ public class H2databaseTest extends TestCase {
         dc.executeUpdate(new UpdateScript() {
             @Override
             public void run(UpdateCallback callback) {
-                JdbcUpdateBuilder updateCallback = (JdbcUpdateBuilder) callback.update("test_table").value("age", 18).where("id")
-                        .greaterThan(1);
-                assertEquals("UPDATE PUBLIC.\"TEST_TABLE\" SET AGE=? WHERE \"TEST_TABLE\".\"ID\" > ?",
-                        updateCallback.createSqlStatement());
+                JdbcUpdateBuilder updateCallback = (JdbcUpdateBuilder) callback.update("test_table").value("age", 18)
+                        .where("id").greaterThan(1);
+                assertEquals("UPDATE PUBLIC.\"TEST_TABLE\" SET AGE=? WHERE \"TEST_TABLE\".\"ID\" > ?", updateCallback
+                        .createSqlStatement());
                 updateCallback.execute();
             }
         });
@@ -388,8 +398,8 @@ public class H2databaseTest extends TestCase {
         dc.executeUpdate(new UpdateScript() {
             @Override
             public void run(UpdateCallback callback) {
-                Table table = callback.createTable(dc.getDefaultSchema(), "test_table").withColumn("foo")
-                        .ofType(ColumnType.INTEGER).withColumn("bar").ofType(ColumnType.VARCHAR).execute();
+                Table table = callback.createTable(dc.getDefaultSchema(), "test_table").withColumn("foo").ofType(
+                        ColumnType.INTEGER).withColumn("bar").ofType(ColumnType.VARCHAR).execute();
                 callback.insertInto(table).value("foo", 1).value("bar", "hello").execute();
                 callback.insertInto(table).value("foo", 2).value("bar", "there").execute();
                 callback.insertInto(table).value("foo", 3).value("bar", "world").execute();
@@ -400,8 +410,8 @@ public class H2databaseTest extends TestCase {
         Query query = new Query().from(table, "a").from(table, "b");
         query.select(table.getColumnByName("foo"), query.getFromClause().getItem(0));
         query.select(table.getColumnByName("foo"), query.getFromClause().getItem(1));
-        query.where(new SelectItem(table.getColumnByName("bar"), query.getFromClause().getItem(0)), OperatorType.EQUALS_TO,
-                "hello");
+        query.where(new SelectItem(table.getColumnByName("bar"), query.getFromClause().getItem(0)),
+                OperatorType.EQUALS_TO, "hello");
 
         assertEquals(
                 "SELECT a.\"FOO\", b.\"FOO\" FROM PUBLIC.\"TEST_TABLE\" a, PUBLIC.\"TEST_TABLE\" b WHERE a.\"BAR\" = 'hello'",
@@ -440,8 +450,8 @@ public class H2databaseTest extends TestCase {
         dc.executeUpdate(new UpdateScript() {
             @Override
             public void run(UpdateCallback callback) {
-                Table table = callback.createTable(dc.getDefaultSchema(), "test_table").withColumn("foo")
-                        .ofType(ColumnType.INTEGER).withColumn("bar").ofType(ColumnType.VARCHAR).execute();
+                Table table = callback.createTable(dc.getDefaultSchema(), "test_table").withColumn("foo").ofType(
+                        ColumnType.INTEGER).withColumn("bar").ofType(ColumnType.VARCHAR).execute();
                 callback.insertInto(table).value("foo", 1).value("bar", "hello").execute();
                 callback.insertInto(table).value("foo", 2).value("bar", "there").execute();
                 callback.insertInto(table).value("foo", 3).value("bar", "world").execute();
@@ -517,8 +527,36 @@ public class H2databaseTest extends TestCase {
     public void testCharOfSizeOne() throws Exception {
         JdbcTestTemplates.meaningOfOneSizeChar(conn);
     }
-    
+
     public void testInterpretationOfNull() throws Exception {
         JdbcTestTemplates.interpretationOfNulls(conn);
     }
+
+    public void testCompositeFkRelation() throws Exception {
+
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute(
+                    "CREATE TABLE PARENT (P1 INTEGER, P2 INTEGER, P3 INTEGER, P4 INTEGER, PRIMARY  KEY (P1,P2, P3, P4))");
+            stmt.execute(
+                    "CREATE TABLE CHILD (C1 INTEGER PRIMARY KEY, CP1 INTEGER , CP2 INTEGER, CP3 INTEGER, CP4 INTEGER, FOREIGN  KEY (CP1,CP2,CP3,CP4) REFERENCES  PARENT(P1,P2,P3,P4))");
+        }
+
+        final JdbcDataContext dc = new JdbcDataContext(conn);
+
+        final Schema schema = dc.getDefaultSchema();
+
+        assertEquals(1, schema.getRelationships().length);
+
+        Relationship rel = schema.getRelationships()[0];
+
+        assertEquals("CP1", rel.getForeignColumns()[0].getName());
+        assertEquals("CP2", rel.getForeignColumns()[1].getName());
+        assertEquals("CP3", rel.getForeignColumns()[2].getName());
+        assertEquals("CP4", rel.getForeignColumns()[3].getName());
+
+        assertEquals("P1", rel.getPrimaryColumns()[0].getName());
+        assertEquals("P2", rel.getPrimaryColumns()[1].getName());
+        assertEquals("P3", rel.getPrimaryColumns()[2].getName());
+        assertEquals("P4", rel.getPrimaryColumns()[3].getName());
+    }
 }
\ No newline at end of file
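
A small usage sketch (not part of the commit) showing how client code observes the composite relationship through the 4.x-era schema API after this fix. It assumes the H2 driver and the MetaModel JDBC module are on the classpath, reuses the DDL and the in-memory URL from testCompositeFkRelation above, and the class name is hypothetical.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    import org.apache.metamodel.jdbc.JdbcDataContext;
    import org.apache.metamodel.schema.Column;
    import org.apache.metamodel.schema.Relationship;
    import org.apache.metamodel.schema.Schema;

    public class CompositeRelationshipUsage {

        public static void main(String[] args) throws Exception {
            // Private in-memory H2 database; same DDL as in testCompositeFkRelation().
            try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:");
                    Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE PARENT (P1 INTEGER, P2 INTEGER, P3 INTEGER, P4 INTEGER,"
                        + " PRIMARY KEY (P1, P2, P3, P4))");
                stmt.execute("CREATE TABLE CHILD (C1 INTEGER PRIMARY KEY, CP1 INTEGER, CP2 INTEGER,"
                        + " CP3 INTEGER, CP4 INTEGER,"
                        + " FOREIGN KEY (CP1, CP2, CP3, CP4) REFERENCES PARENT (P1, P2, P3, P4))");

                Schema schema = new JdbcDataContext(conn).getDefaultSchema();

                // With the fix, the composite FK is modelled as ONE relationship whose
                // primary/foreign column arrays line up pairwise: CP1 -> P1, CP2 -> P2, ...
                for (Relationship rel : schema.getRelationships()) {
                    Column[] pkCols = rel.getPrimaryColumns();
                    Column[] fkCols = rel.getForeignColumns();
                    for (int i = 0; i < pkCols.length; i++) {
                        System.out.println(fkCols[i].getTable().getName() + "." + fkCols[i].getName()
                                + " -> " + pkCols[i].getTable().getName() + "." + pkCols[i].getName());
                    }
                }
            }
        }
    }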