Posted to commits@cayenne.apache.org by aa...@apache.org on 2016/09/29 17:38:58 UTC

[12/15] cayenne git commit: CAY-2116 Split schema synchronization code in a separate module

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoader.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoader.java
new file mode 100644
index 0000000..a6f286a
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoader.java
@@ -0,0 +1,829 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.dba.DbAdapter;
+import org.apache.cayenne.dba.TypesMapping;
+import org.apache.cayenne.dbsync.reverse.filters.CatalogFilter;
+import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
+import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
+import org.apache.cayenne.dbsync.reverse.filters.SchemaFilter;
+import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
+import org.apache.cayenne.map.DataMap;
+import org.apache.cayenne.map.DbAttribute;
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DbJoin;
+import org.apache.cayenne.map.DbRelationship;
+import org.apache.cayenne.map.DbRelationshipDetected;
+import org.apache.cayenne.map.ObjEntity;
+import org.apache.cayenne.map.Procedure;
+import org.apache.cayenne.map.ProcedureParameter;
+import org.apache.cayenne.map.naming.DefaultUniqueNameGenerator;
+import org.apache.cayenne.map.naming.ExportedKey;
+import org.apache.cayenne.map.naming.LegacyNameGenerator;
+import org.apache.cayenne.map.naming.NameCheckers;
+import org.apache.cayenne.map.naming.ObjectNameGenerator;
+import org.apache.cayenne.dbsync.merge.EntityMergeSupport;
+import org.apache.cayenne.util.EqualsBuilder;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Performs reverse engineering of the database. It can create
+ * DataMaps using database metadata obtained via a JDBC driver.
+ *
+ * @since 4.0
+ */
+public class DbLoader {
+
+	private static final Log LOGGER = LogFactory.getLog(DbLoader.class);
+
+	public static final String WILDCARD = "%";
+	public static final String WILDCARD_PATTERN = ".*";
+
+	private final Connection connection;
+	private final DbAdapter adapter;
+	private final DbLoaderDelegate delegate;
+
+	private boolean creatingMeaningfulPK;
+
+	private DatabaseMetaData metaData;
+
+	/**
+	 * Strategy for choosing names for entities, attributes and relationships
+	 */
+	private ObjectNameGenerator nameGenerator;
+
+	/**
+	 * Creates a new DbLoader.
+	 */
+	public DbLoader(Connection connection, DbAdapter adapter, DbLoaderDelegate delegate) {
+		this(connection, adapter, delegate, new LegacyNameGenerator());
+	}
+
+	/**
+	 * Creates a new DbLoader with the specified naming strategy.
+	 *
+	 * @since 3.0
+	 */
+	public DbLoader(Connection connection, DbAdapter adapter, DbLoaderDelegate delegate, ObjectNameGenerator strategy) {
+		this.adapter = adapter;
+		this.connection = connection;
+		this.delegate = delegate == null ? new DefaultDbLoaderDelegate() : delegate;
+
+		setNameGenerator(strategy);
+	}
+
+	/**
+	 * Returns DatabaseMetaData object associated with this DbLoader.
+	 */
+	private DatabaseMetaData getMetaData() throws SQLException {
+		if (metaData == null) {
+			metaData = connection.getMetaData();
+		}
+		return metaData;
+	}
+
+	/**
+	 * Checks if the database supports schemas.
+	 */
+	protected boolean supportSchemas() throws SQLException {
+		if (metaData == null) {
+			metaData = connection.getMetaData();
+		}
+		return metaData.supportsSchemasInTableDefinitions();
+	}
+
+	/**
+	 * Checks if the database supports catalogs.
+	 */
+	protected boolean supportCatalogs() throws SQLException {
+		if (metaData == null) {
+			metaData = connection.getMetaData();
+		}
+		return metaData.supportsCatalogsInTableDefinitions();
+	}
+
+	/**
+	 * @since 3.0
+	 */
+	public void setCreatingMeaningfulPK(boolean creatingMeaningfulPK) {
+		this.creatingMeaningfulPK = creatingMeaningfulPK;
+	}
+
+	/**
+	 * Returns true if the generator should map all primary key columns as
+	 * ObjAttributes.
+	 *
+	 * @since 3.0
+	 */
+	public boolean isCreatingMeaningfulPK() {
+		return creatingMeaningfulPK;
+	}
+
+	/**
+	 * Returns database connection used by this DbLoader.
+	 *
+	 * @since 3.0
+	 */
+	public Connection getConnection() {
+		return connection;
+	}
+
+	/**
+	 * Returns DbAdapter associated with this DbLoader.
+	 *
+	 * @since 1.1
+	 */
+	public DbAdapter getAdapter() {
+		return adapter;
+	}
+
+	/**
+	 * Retrieves catalogs for the database associated with this DbLoader.
+	 *
+	 * @return List with the catalog names, or an empty List if none are found.
+	 */
+	public List<String> getCatalogs() throws SQLException {
+		try (ResultSet rs = getMetaData().getCatalogs()) {
+			return getStrings(rs);
+		}
+	}
+
+	/**
+	 * Retrieves the schemas for the database.
+	 *
+	 * @return List with the schema names, or an empty List if none are found.
+	 */
+	public List<String> getSchemas() throws SQLException {
+
+		try (ResultSet rs = getMetaData().getSchemas()) {
+			return getStrings(rs);
+		}
+	}
+
+	private static List<String> getStrings(ResultSet rs) throws SQLException {
+		List<String> strings = new ArrayList<String>();
+
+		while (rs.next()) {
+			strings.add(rs.getString(1));
+		}
+
+		return strings;
+	}
+
+	/**
+	 * Returns all the table types for the given database. Typical types are
+	 * "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY",
+	 * "ALIAS", "SYNONYM", etc.
+	 *
+	 * @return List of Strings, or an empty List if nothing is found.
+	 */
+	public List<String> getTableTypes() throws SQLException {
+		List<String> types = new ArrayList<String>();
+
+		try (ResultSet rs = getMetaData().getTableTypes();) {
+			while (rs.next()) {
+				types.add(rs.getString("TABLE_TYPE").trim());
+			}
+		}
+
+		return types;
+	}
+
+	/**
+	 * Creates an ObjEntity for each DbEntity in the map.
+	 */
+	public Collection<ObjEntity> loadObjEntities(DataMap map, DbLoaderConfiguration config,
+			Collection<DbEntity> entities) {
+		Collection<ObjEntity> loadedEntities = DbLoader.loadObjEntities(map, config, entities, nameGenerator);
+
+		createEntityMerger(map).synchronizeWithDbEntities(loadedEntities);
+
+		return loadedEntities;
+	}
+
+	public static Collection<ObjEntity> loadObjEntities(DataMap map, DbLoaderConfiguration config,
+			Collection<DbEntity> entities, ObjectNameGenerator nameGenerator) {
+		if (entities.isEmpty()) {
+			return Collections.emptyList();
+		}
+
+		Collection<ObjEntity> loadedEntities = new ArrayList<ObjEntity>(entities.size());
+
+		// create empty ObjEntities for all the tables
+		for (DbEntity dbEntity : entities) {
+
+			// check if there are existing entities
+
+			// TODO: performance. This is an O(n^2) search and it shows on
+			// YourKit profiles. Pre-cache mapped entities perhaps (?)
+			Collection<ObjEntity> existing = map.getMappedEntities(dbEntity);
+			if (!existing.isEmpty()) {
+				loadedEntities.addAll(existing);
+				continue;
+			}
+
+			String objEntityName = DefaultUniqueNameGenerator.generate(NameCheckers.objEntity, map,
+					nameGenerator.createObjEntityName(dbEntity));
+
+			ObjEntity objEntity = new ObjEntity(objEntityName);
+			objEntity.setDbEntity(dbEntity);
+			objEntity.setClassName(config.getGenericClassName() != null ? config.getGenericClassName() : map
+					.getNameWithDefaultPackage(objEntity.getName()));
+
+			map.addObjEntity(objEntity);
+			loadedEntities.add(objEntity);
+		}
+
+		return loadedEntities;
+	}
+
+	/**
+	 * @since 4.0
+	 */
+	protected EntityMergeSupport createEntityMerger(DataMap map) {
+		return new EntityMergeSupport(map, nameGenerator, !creatingMeaningfulPK);
+	}
+
+	protected void loadDbRelationships(DbLoaderConfiguration config, String catalog, String schema,
+			List<DbEntity> tables) throws SQLException {
+		if (config.isSkipRelationshipsLoading()) {
+			return;
+		}
+
+		// Get all the foreign keys referencing this table
+		Map<String, DbEntity> tablesMap = new HashMap<>();
+		for (DbEntity table : tables) {
+			tablesMap.put(table.getName(), table);
+		}
+
+		Map<String, Set<ExportedKey>> keys = loadExportedKeys(config, catalog, schema, tablesMap);
+		for (Map.Entry<String, Set<ExportedKey>> entry : keys.entrySet()) {
+			if (LOGGER.isDebugEnabled()) {
+				LOGGER.debug("Process keys for: " + entry.getKey());
+			}
+
+			Set<ExportedKey> exportedKeys = entry.getValue();
+			ExportedKey key = exportedKeys.iterator().next();
+			if (key == null) {
+				throw new IllegalStateException();
+			}
+
+			DbEntity pkEntity = tablesMap.get(key.getPKTableName());
+			if (pkEntity == null) {
+				skipRelationLog(key, key.getPKTableName());
+				continue;
+			}
+
+			DbEntity fkEntity = tablesMap.get(key.getFKTableName());
+			if (fkEntity == null) {
+				skipRelationLog(key, key.getFKTableName());
+				continue;
+			}
+
+			if (!new EqualsBuilder().append(pkEntity.getCatalog(), key.pkCatalog)
+					.append(pkEntity.getSchema(), key.pkSchema).append(fkEntity.getCatalog(), key.fkCatalog)
+					.append(fkEntity.getSchema(), key.fkSchema).isEquals()) {
+
+				LOGGER.info("Skip relation: '" + key + "' because it related to objects from other catalog/schema");
+				LOGGER.info("     relation primary key: '" + key.pkCatalog + "." + key.pkSchema + "'");
+				LOGGER.info("       primary key entity: '" + pkEntity.getCatalog() + "." + pkEntity.getSchema() + "'");
+				LOGGER.info("     relation foreign key: '" + key.fkCatalog + "." + key.fkSchema + "'");
+				LOGGER.info("       foreign key entity: '" + fkEntity.getCatalog() + "." + fkEntity.getSchema() + "'");
+				continue;
+			}
+
+			// forwardRelationship is a reference from the table with the primary key
+			DbRelationship forwardRelationship = new DbRelationship(generateName(pkEntity, key, true));
+			forwardRelationship.setSourceEntity(pkEntity);
+			forwardRelationship.setTargetEntityName(fkEntity);
+
+			// reverseRelationship is a reference from the table with the foreign key;
+			// it is what we actually load from the db
+			DbRelationshipDetected reverseRelationship = new DbRelationshipDetected(generateName(fkEntity, key, false));
+			reverseRelationship.setFkName(key.getFKName());
+			reverseRelationship.setSourceEntity(fkEntity);
+			reverseRelationship.setTargetEntityName(pkEntity);
+			reverseRelationship.setToMany(false);
+
+			createAndAppendJoins(exportedKeys, pkEntity, fkEntity, forwardRelationship, reverseRelationship);
+
+			boolean toDependentPK = isToDependentPK(forwardRelationship);
+			forwardRelationship.setToDependentPK(toDependentPK);
+
+			boolean isOneToOne = toDependentPK
+					&& fkEntity.getPrimaryKeys().size() == forwardRelationship.getJoins().size();
+
+			forwardRelationship.setToMany(!isOneToOne);
+			forwardRelationship.setName(generateName(pkEntity, key, !isOneToOne));
+
+			if (delegate.dbRelationshipLoaded(fkEntity, reverseRelationship)) {
+				fkEntity.addRelationship(reverseRelationship);
+			}
+			if (delegate.dbRelationshipLoaded(pkEntity, forwardRelationship)) {
+				pkEntity.addRelationship(forwardRelationship);
+			}
+		}
+	}
+
+	private boolean isToDependentPK(DbRelationship forwardRelationship) {
+		for (DbJoin dbJoin : forwardRelationship.getJoins()) {
+			if (!dbJoin.getTarget().isPrimaryKey()) {
+				return false;
+			}
+		}
+
+		return true;
+	}
+
+	private void createAndAppendJoins(Set<ExportedKey> exportedKeys, DbEntity pkEntity, DbEntity fkEntity,
+			DbRelationship forwardRelationship, DbRelationshipDetected reverseRelationship) {
+		for (ExportedKey exportedKey : exportedKeys) {
+			// Create and append joins
+			String pkName = exportedKey.getPKColumnName();
+			String fkName = exportedKey.getFKColumnName();
+
+			// skip invalid joins...
+			DbAttribute pkAtt = pkEntity.getAttribute(pkName);
+			if (pkAtt == null) {
+				LOGGER.info("no attribute for declared primary key: " + pkName);
+				continue;
+			}
+
+			DbAttribute fkAtt = fkEntity.getAttribute(fkName);
+			if (fkAtt == null) {
+				LOGGER.info("no attribute for declared foreign key: " + fkName);
+				continue;
+			}
+
+			forwardRelationship.addJoin(new DbJoin(forwardRelationship, pkName, fkName));
+			reverseRelationship.addJoin(new DbJoin(reverseRelationship, fkName, pkName));
+		}
+	}
+
+	private Map<String, Set<ExportedKey>> loadExportedKeys(DbLoaderConfiguration config, String catalog, String schema,
+			Map<String, DbEntity> tables) throws SQLException {
+		Map<String, Set<ExportedKey>> keys = new HashMap<>();
+
+		for (DbEntity dbEntity : tables.values()) {
+			if (!delegate.dbRelationship(dbEntity)) {
+				continue;
+			}
+
+			ResultSet rs;
+			try {
+				rs = getMetaData().getExportedKeys(catalog, schema, dbEntity.getName());
+			} catch (SQLException cay182Ex) {
+				// Sybase-specific: the call above fails on VIEWS, see CAY-182.
+				LOGGER.info(
+						"Error getting relationships for '" + catalog + "." + schema + "', ignoring. "
+								+ cay182Ex.getMessage(), cay182Ex);
+				return new HashMap<>();
+			}
+
+			try {
+				while (rs.next()) {
+					ExportedKey key = ExportedKey.extractData(rs);
+
+					DbEntity fkEntity = tables.get(key.getFKTableName());
+					if (fkEntity == null) {
+						skipRelationLog(key, key.getFKTableName());
+						continue;
+					}
+
+					if (config.getFiltersConfig().tableFilter(fkEntity.getCatalog(), fkEntity.getSchema())
+							.isIncludeTable(fkEntity.getName()) == null) {
+						continue;
+					}
+
+					Set<ExportedKey> exportedKeys = keys.get(key.getStrKey());
+					if (exportedKeys == null) {
+						exportedKeys = new TreeSet<ExportedKey>();
+
+						keys.put(key.getStrKey(), exportedKeys);
+					}
+					exportedKeys.add(key);
+				}
+
+			} finally {
+				rs.close();
+			}
+		}
+		return keys;
+	}
+
+	private void skipRelationLog(ExportedKey key, String tableName) {
+		LOGGER.info("Skip relation: '" + key + "' because table '" + tableName + "' not found");
+	}
+
+	private String generateName(DbEntity entity, ExportedKey key, boolean toMany) {
+		String forwardPreferredName = nameGenerator.createDbRelationshipName(key, toMany);
+		return DefaultUniqueNameGenerator.generate(NameCheckers.dbRelationship, entity, forwardPreferredName);
+	}
+
+	/**
+	 * Flattens many-to-many relationships in the generated model.
+	 */
+	public static void flattenManyToManyRelationships(DataMap map, Collection<ObjEntity> loadedObjEntities,
+			ObjectNameGenerator objectNameGenerator) {
+		if (loadedObjEntities.isEmpty()) {
+			return;
+		}
+		Collection<ObjEntity> entitiesForDelete = new LinkedList<ObjEntity>();
+
+		for (ObjEntity curEntity : loadedObjEntities) {
+			ManyToManyCandidateEntity entity = ManyToManyCandidateEntity.build(curEntity);
+
+			if (entity != null) {
+				entity.optimizeRelationships(objectNameGenerator);
+				entitiesForDelete.add(curEntity);
+			}
+		}
+
+		// remove the join entities that were replaced by flattened relationships
+		for (ObjEntity curDeleteEntity : entitiesForDelete) {
+			map.removeObjEntity(curDeleteEntity.getName(), true);
+		}
+		loadedObjEntities.removeAll(entitiesForDelete);
+	}
+
+	private void fireObjEntitiesAddedEvents(Collection<ObjEntity> loadedObjEntities) {
+		for (ObjEntity curEntity : loadedObjEntities) {
+			// notify delegate
+			if (delegate != null) {
+				delegate.objEntityAdded(curEntity);
+			}
+		}
+	}
+
+	/**
+	 * By default we want to load Tables and Views.
+	 *
+	 * @see DbLoader#getTableTypes()
+	 * @since 4.0
+	 */
+	public String[] getDefaultTableTypes() {
+		List<String> list = new ArrayList<String>(2);
+
+		String viewType = adapter.tableTypeForView();
+		if (viewType != null) {
+			list.add(viewType);
+		}
+
+		String tableType = adapter.tableTypeForTable();
+		if (tableType != null) {
+			list.add(tableType);
+		}
+
+		return list.toArray(new String[list.size()]);
+	}
+
+	/**
+	 * Performs database reverse engineering and generates a DataMap that contains
+	 * the default mapping of the tables and views. Regular tables and views are
+	 * included by default.
+	 *
+	 * @since 1.0.7
+	 * @deprecated since 4.0 use
+	 *             {@link #load(DataMap, DbLoaderConfiguration)}
+	 *             method that supports catalogs.
+	 */
+	@Deprecated
+	public DataMap loadDataMapFromDB(String schemaPattern, String tablePattern, DataMap dataMap) throws SQLException {
+
+		DbLoaderConfiguration configuration = new DbLoaderConfiguration();
+		configuration.setFiltersConfig(FiltersConfig.create(null, schemaPattern, TableFilter.include(tablePattern),
+				PatternFilter.INCLUDE_NOTHING));
+
+		load(dataMap, configuration);
+		return dataMap;
+	}
+
+	/**
+	 * Performs database reverse engineering and generates a DataMap object that
+	 * contains the default mapping of the tables and views. Allows limiting the
+	 * types of tables to read.
+	 *
+	 * @deprecated since 4.0 use
+	 *             {@link #load(DataMap, DbLoaderConfiguration)}
+	 *             method that supports catalogs.
+	 */
+	@Deprecated
+	public DataMap loadDataMapFromDB(String schemaPattern, String tablePattern, String[] tableTypes, DataMap dataMap)
+			throws SQLException {
+		dataMap.clear();
+
+		DbLoaderConfiguration config = new DbLoaderConfiguration();
+		config.setFiltersConfig(FiltersConfig.create(null, schemaPattern, TableFilter.include(tablePattern),
+				PatternFilter.INCLUDE_NOTHING));
+		config.setTableTypes(tableTypes);
+
+		load(dataMap, config);
+		return dataMap;
+	}
+
+	/**
+	 * Performs database reverse engineering based on the specified config and
+	 * fills the specified DataMap object with DB and object mapping info.
+	 *
+	 * @since 4.0
+	 */
+	public void load(DataMap dataMap, DbLoaderConfiguration config) throws SQLException {
+		LOGGER.info("Schema loading...");
+
+		String[] types = config.getTableTypes();
+		if (types == null || types.length == 0) {
+			types = getDefaultTableTypes();
+		}
+
+		for (CatalogFilter catalog : config.getFiltersConfig().catalogs) {
+			for (SchemaFilter schema : catalog.schemas) {
+
+				List<DbEntity> entities = createTableLoader(catalog.name, schema.name, schema.tables).loadDbEntities(
+						dataMap, config, types);
+
+				if (entities != null) {
+					loadDbRelationships(config, catalog.name, schema.name, entities);
+
+					prepareObjLayer(dataMap, config, entities);
+				}
+			}
+		}
+	}
+
+	protected DbTableLoader createTableLoader(String catalog, String schema, TableFilter filter) throws SQLException {
+		return new DbTableLoader(catalog, schema, getMetaData(), delegate, new DbAttributesPerSchemaLoader(catalog,
+				schema, getMetaData(), adapter, filter));
+	}
+
+	public void prepareObjLayer(DataMap dataMap, DbLoaderConfiguration config, Collection<DbEntity> entities) {
+		Collection<ObjEntity> loadedObjEntities = loadObjEntities(dataMap, config, entities);
+		flattenManyToManyRelationships(dataMap, loadedObjEntities, getNameGenerator());
+		fireObjEntitiesAddedEvents(loadedObjEntities);
+	}
+
+	/**
+	 * Performs database reverse engineering to match the specified catalog,
+	 * schema, table name and table type patterns and fills the specified
+	 * DataMap object with DB and object mapping info.
+	 *
+	 * @since 4.0
+	 */
+	public DataMap load(DbLoaderConfiguration config) throws SQLException {
+
+		DataMap dataMap = new DataMap();
+		load(dataMap, config);
+		loadProcedures(dataMap, config);
+
+		return dataMap;
+	}
+
+	/**
+	 * Loads database stored procedures into the DataMap.
+	 * <p>
+	 * <i>As of 1.1 there is no boolean property or delegate method to make
+	 * procedure loading optional or to implement custom merging logic, so
+	 * currently this method is NOT CALLED from "loadDataMapFromDB" and should
+	 * be invoked explicitly by the user. </i>
+	 * </p>
+	 *
+	 * @since 1.1
+	 * @deprecated since 4.0 use loadProcedures(DataMap, String, String, String)
+	 *             that supports "catalog" pattern.
+	 */
+	@Deprecated
+	public void loadProceduresFromDB(String schemaPattern, String namePattern, DataMap dataMap) throws SQLException {
+		DbLoaderConfiguration configuration = new DbLoaderConfiguration();
+		configuration.setFiltersConfig(FiltersConfig.create(null, schemaPattern, TableFilter.everything(),
+				new PatternFilter().include(namePattern)));
+
+		loadProcedures(dataMap, configuration);
+	}
+
+	/**
+	 * Loads database stored procedures into the DataMap.
+	 * <p>
+	 * <i>As of 1.1 there is no boolean property or delegate method to make
+	 * procedure loading optional or to implement custom merging logic, so
+	 * currently this method is NOT CALLED from "loadDataMapFromDB" and should
+	 * be invoked explicitly by the user. </i>
+	 * </p>
+	 *
+	 * @since 4.0
+	 */
+	public Map<String, Procedure> loadProcedures(DataMap dataMap, DbLoaderConfiguration config) throws SQLException {
+
+		Map<String, Procedure> procedures = loadProcedures(config);
+		if (procedures.isEmpty()) {
+			return procedures;
+		}
+
+		loadProceduresColumns(config, procedures);
+
+		for (Procedure procedure : procedures.values()) {
+			dataMap.addProcedure(procedure);
+		}
+
+		return procedures;
+	}
+
+	private void loadProceduresColumns(DbLoaderConfiguration config, Map<String, Procedure> procedures)
+			throws SQLException {
+
+		for (CatalogFilter catalog : config.getFiltersConfig().catalogs) {
+			for (SchemaFilter schema : catalog.schemas) {
+				loadProceduresColumns(procedures, catalog.name, schema.name);
+			}
+		}
+	}
+
+	private void loadProceduresColumns(Map<String, Procedure> procedures, String catalog, String schema)
+			throws SQLException {
+
+		try (ResultSet columnsRS = getMetaData().getProcedureColumns(catalog, schema, null, null);) {
+			while (columnsRS.next()) {
+
+				String s = columnsRS.getString("PROCEDURE_SCHEM");
+				String name = columnsRS.getString("PROCEDURE_NAME");
+				String key = (s == null ? "" : s + '.') + name;
+				Procedure procedure = procedures.get(key);
+				if (procedure == null) {
+					continue;
+				}
+
+				ProcedureParameter column = loadProcedureParams(columnsRS, key, procedure);
+				if (column == null) {
+					continue;
+				}
+				procedure.addCallParameter(column);
+			}
+		}
+	}
+
+	private ProcedureParameter loadProcedureParams(ResultSet columnsRS, String key, Procedure procedure)
+			throws SQLException {
+		String columnName = columnsRS.getString("COLUMN_NAME");
+
+		// skip ResultSet columns, as they are not described in Cayenne
+		// procedures yet...
+		short type = columnsRS.getShort("COLUMN_TYPE");
+		if (type == DatabaseMetaData.procedureColumnResult) {
+			LOGGER.debug("skipping ResultSet column: " + key + "." + columnName);
+		}
+
+		if (columnName == null) {
+			if (type == DatabaseMetaData.procedureColumnReturn) {
+				LOGGER.debug("null column name, assuming result column: " + key);
+				columnName = "_return_value";
+				procedure.setReturningValue(true);
+			} else {
+				LOGGER.info("invalid null column name, skipping column : " + key);
+				return null;
+			}
+		}
+
+		int columnType = columnsRS.getInt("DATA_TYPE");
+
+		// ignore precision of non-decimal columns
+		int decimalDigits = -1;
+		if (TypesMapping.isDecimal(columnType)) {
+			decimalDigits = columnsRS.getShort("SCALE");
+			if (columnsRS.wasNull()) {
+				decimalDigits = -1;
+			}
+		}
+
+		ProcedureParameter column = new ProcedureParameter(columnName);
+		int direction = getDirection(type);
+		if (direction != -1) {
+			column.setDirection(direction);
+		}
+
+		column.setType(columnType);
+		column.setMaxLength(columnsRS.getInt("LENGTH"));
+		column.setPrecision(decimalDigits);
+
+		column.setProcedure(procedure);
+		return column;
+	}
+
+	private static int getDirection(short type) {
+		switch (type) {
+		case DatabaseMetaData.procedureColumnIn:
+			return ProcedureParameter.IN_PARAMETER;
+		case DatabaseMetaData.procedureColumnInOut:
+			return ProcedureParameter.IN_OUT_PARAMETER;
+		case DatabaseMetaData.procedureColumnOut:
+			return ProcedureParameter.OUT_PARAMETER;
+		default:
+			return -1;
+		}
+	}
+
+	private Map<String, Procedure> loadProcedures(DbLoaderConfiguration config) throws SQLException {
+		Map<String, Procedure> procedures = new HashMap<>();
+
+		FiltersConfig filters = config.getFiltersConfig();
+		for (CatalogFilter catalog : filters.catalogs) {
+			for (SchemaFilter schema : catalog.schemas) {
+				if (filters.proceduresFilter(catalog.name, schema.name).isEmpty()) {
+					continue;
+				}
+
+				procedures.putAll(loadProcedures(filters, catalog.name, schema.name));
+			}
+		}
+
+		return procedures;
+	}
+
+	private Map<String, Procedure> loadProcedures(FiltersConfig filters, String catalog, String schema)
+			throws SQLException {
+		Map<String, Procedure> procedures = new HashMap<>();
+		// get procedures
+
+		try (ResultSet rs = getMetaData().getProcedures(catalog, schema, WILDCARD);) {
+			while (rs.next()) {
+
+				String name = rs.getString("PROCEDURE_NAME");
+				Procedure procedure = new Procedure(name);
+				procedure.setCatalog(rs.getString("PROCEDURE_CAT"));
+				procedure.setSchema(rs.getString("PROCEDURE_SCHEM"));
+
+				if (!filters.proceduresFilter(procedure.getCatalog(), procedure.getSchema()).isInclude(
+						procedure.getName())) {
+					LOGGER.info("skipping Cayenne PK procedure: " + name);
+					continue;
+				}
+
+				switch (rs.getShort("PROCEDURE_TYPE")) {
+				case DatabaseMetaData.procedureNoResult:
+				case DatabaseMetaData.procedureResultUnknown:
+					procedure.setReturningValue(false);
+					break;
+				case DatabaseMetaData.procedureReturnsResult:
+					procedure.setReturningValue(true);
+					break;
+				}
+
+				procedures.put(procedure.getFullyQualifiedName(), procedure);
+			}
+		}
+		return procedures;
+	}
+
+	/**
+	 * Sets new naming strategy for reverse engineering
+	 *
+	 * @since 3.0
+	 */
+	public void setNameGenerator(ObjectNameGenerator strategy) {
+		if (strategy == null) {
+			LOGGER.warn("Attempt to set null into NameGenerator. LegacyNameGenerator will be used.");
+			this.nameGenerator = new LegacyNameGenerator();
+		} else {
+			this.nameGenerator = strategy;
+		}
+	}
+
+	/**
+	 * @return naming strategy for reverse engineering
+	 * @since 3.0
+	 */
+	public ObjectNameGenerator getNameGenerator() {
+		return nameGenerator;
+	}
+}
\ No newline at end of file
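
Below is a minimal usage sketch of the new DbLoader API, assuming a DataSource and a matching DbAdapter are obtained elsewhere (for example via the Cayenne DI stack); the example class and the "dataSource"/"adapter" names are illustrative and not part of this commit:

import java.sql.Connection;
import java.sql.SQLException;

import javax.sql.DataSource;

import org.apache.cayenne.dba.DbAdapter;
import org.apache.cayenne.dbsync.reverse.DbLoader;
import org.apache.cayenne.dbsync.reverse.DbLoaderConfiguration;
import org.apache.cayenne.dbsync.reverse.DefaultDbLoaderDelegate;
import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
import org.apache.cayenne.map.DataMap;

public class ReverseEngineeringExample {

    // "dataSource" and "adapter" are placeholders; obtain them from your own runtime setup.
    public static DataMap reverseEngineer(DataSource dataSource, DbAdapter adapter) throws SQLException {
        try (Connection connection = dataSource.getConnection()) {
            DbLoader loader = new DbLoader(connection, adapter, new DefaultDbLoaderDelegate());

            // include everything; see DbLoaderConfiguration/FiltersConfig for narrower filters
            DbLoaderConfiguration config = new DbLoaderConfiguration();
            config.setFiltersConfig(FiltersConfig.create(null, null,
                    TableFilter.everything(), PatternFilter.INCLUDE_NOTHING));

            // load() fills a new DataMap with DbEntities, ObjEntities and stored procedures
            return loader.load(config);
        }
    }
}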

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderConfiguration.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderConfiguration.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderConfiguration.java
new file mode 100644
index 0000000..e5421fe
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderConfiguration.java
@@ -0,0 +1,150 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
+import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
+import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
+
+/**
+ * @since 4.0
+ */
+public class DbLoaderConfiguration {
+
+    /**
+     * The name of a generic class that should be used for all ObjEntities. The
+     * most common generic class is {@link org.apache.cayenne.CayenneDataObject}.
+     * If the generic class name is null (the default), DbLoader will assign each
+     * entity a unique class name derived from the table name.
+     */
+    private String genericClassName;
+
+/*
+    // TODO: Andrus, 10/29/2005 - this type of filtering should be delegated to adapter
+       TODO by default should skip name.startsWith("BIN$")
+
+    private NameFilter tableFilter = NamePatternMatcher.build(null, null, "BIN$");
+
+    private NameFilter columnFilter;
+
+    private NameFilter proceduresFilter = new NameFilter() {
+        private final Collection<String> excludedProcedures = Arrays.asList(
+                "auto_pk_for_table",
+                "auto_pk_for_table;1" // the last name is some Mac OS X Sybase artifact
+        );
+
+        @Override
+        public boolean isIncluded(String string) {
+            return !excludedProcedures.contains(string);
+        }
+    };
+*/
+
+
+    /**
+     * Java class implementing org.apache.cayenne.map.naming.NamingStrategy.
+     * This is used to specify how ObjEntities will be mapped from the imported
+     * DB schema.
+     */
+    private String namingStrategy;
+
+    private Boolean skipRelationshipsLoading;
+
+    private Boolean skipPrimaryKeyLoading;
+
+    private String[] tableTypes;
+
+    private FiltersConfig filtersConfig;
+
+    public String getGenericClassName() {
+        return genericClassName;
+    }
+
+    public void setGenericClassName(String genericClassName) {
+        this.genericClassName = genericClassName;
+    }
+
+    public String[] getTableTypes() {
+        return tableTypes;
+    }
+
+    public void setTableTypes(String[] tableTypes) {
+        this.tableTypes = tableTypes;
+    }
+
+    public String getNamingStrategy() {
+        return namingStrategy;
+    }
+
+    public void setNamingStrategy(String namingStrategy) {
+        this.namingStrategy = namingStrategy;
+    }
+
+    public FiltersConfig getFiltersConfig() {
+        if (filtersConfig == null) {
+            // this case is often used in tests where the config is not initialized properly
+            return FiltersConfig.create(null, null, TableFilter.everything(), PatternFilter.INCLUDE_NOTHING);
+        }
+        return filtersConfig;
+    }
+
+    public void setFiltersConfig(FiltersConfig filtersConfig) {
+        this.filtersConfig = filtersConfig;
+    }
+
+    public boolean isSkipRelationshipsLoading() {
+        return skipRelationshipsLoading != null && skipRelationshipsLoading;
+    }
+
+    public Boolean getSkipRelationshipsLoading() {
+        return skipRelationshipsLoading;
+    }
+
+    public void setSkipRelationshipsLoading(Boolean skipRelationshipsLoading) {
+        this.skipRelationshipsLoading = skipRelationshipsLoading;
+    }
+
+    public void setSkipPrimaryKeyLoading(Boolean skipPrimaryKeyLoading) {
+        this.skipPrimaryKeyLoading = skipPrimaryKeyLoading;
+    }
+
+    public Boolean getSkipPrimaryKeyLoading() {
+        return skipPrimaryKeyLoading;
+    }
+
+    public boolean isSkipPrimaryKeyLoading() {
+        return skipPrimaryKeyLoading != null && skipPrimaryKeyLoading;
+    }
+
+    @Override
+    public String toString() {
+        String res = "EntitiesFilters: " + getFiltersConfig();
+        if (isSkipRelationshipsLoading()) {
+            res += "\n Skip Loading Relationships! \n";
+        }
+
+        if (isSkipPrimaryKeyLoading()) {
+            res += "\n Skip Loading PrimaryKeys! \n";
+        }
+
+        return res;
+    }
+}
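
An illustrative configuration narrowing the import to a single schema and table pattern might look as follows; the "APP" schema and "ARTIST%" pattern are made-up values, and the filter classes come from org.apache.cayenne.dbsync.reverse.filters:

DbLoaderConfiguration config = new DbLoaderConfiguration();
config.setTableTypes(new String[] { "TABLE", "VIEW" });
config.setSkipRelationshipsLoading(Boolean.FALSE);
config.setSkipPrimaryKeyLoading(Boolean.FALSE);
config.setFiltersConfig(FiltersConfig.create(null, "APP",
        TableFilter.include("ARTIST%"), PatternFilter.INCLUDE_NOTHING));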

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderDelegate.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderDelegate.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderDelegate.java
new file mode 100644
index 0000000..35a61fb
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbLoaderDelegate.java
@@ -0,0 +1,58 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DbRelationship;
+import org.apache.cayenne.map.ObjEntity;
+
+/**
+ * DbLoaderDelegate defines an API that allows controlling the behavior of DbLoader
+ * during database reverse engineering. The delegate is also notified of the
+ * progress of reverse engineering.
+ */
+public interface DbLoaderDelegate {
+
+    void dbEntityAdded(DbEntity entity);
+
+    void dbEntityRemoved(DbEntity entity);
+
+    /**
+     * Called before relationships are loaded for a DbEntity.
+     *
+     * @param entity the entity whose relationships are about to be loaded
+     * @return true if relationships should be processed for this entity,
+     *         false otherwise
+     */
+    boolean dbRelationship(DbEntity entity);
+
+    /**
+     * Called after a relationship has been loaded from the database, but before
+     * it is added to the DbEntity.
+     *
+     * @param entity the entity the relationship would be added to
+     * @param relationship the relationship loaded from the database
+     * @return true if this relationship should be added to the entity,
+     *         false otherwise
+     */
+    boolean dbRelationshipLoaded(DbEntity entity, DbRelationship relationship);
+
+    void objEntityAdded(ObjEntity entity);
+
+    void objEntityRemoved(ObjEntity entity);
+}

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbTableLoader.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbTableLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbTableLoader.java
new file mode 100644
index 0000000..0d12e64
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DbTableLoader.java
@@ -0,0 +1,195 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
+import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
+import org.apache.cayenne.map.DataMap;
+import org.apache.cayenne.map.DbAttribute;
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DetectedDbEntity;
+import org.apache.cayenne.map.ObjEntity;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * @since 4.0
+ */
+public class DbTableLoader {
+
+	private static final Log LOGGER = LogFactory.getLog(DbTableLoader.class);
+
+	private static final String WILDCARD = "%";
+
+	private final String catalog;
+	private final String schema;
+
+	private final DatabaseMetaData metaData;
+	private final DbLoaderDelegate delegate;
+
+	private final DbAttributesLoader attributesLoader;
+
+	public DbTableLoader(String catalog, String schema, DatabaseMetaData metaData, DbLoaderDelegate delegate,
+			DbAttributesLoader attributesLoader) {
+		this.catalog = catalog;
+		this.schema = schema;
+		this.metaData = metaData;
+		this.delegate = delegate;
+
+		this.attributesLoader = attributesLoader;
+	}
+
+	/**
+	 * Returns all tables matching the given combination of criteria. Tables are
+	 * returned as DbEntities without any attributes or relationships.
+	 *
+	 * @param types
+	 *            the types of tables to retrieve; null retrieves all types.
+	 * @return a list of matching tables as DetectedDbEntity objects.
+	 * @since 4.0
+	 */
+	public List<DetectedDbEntity> getDbEntities(TableFilter filters, String[] types) throws SQLException {
+		if (LOGGER.isDebugEnabled()) {
+			LOGGER.debug("Read tables: catalog=" + catalog + ", schema=" + schema + ", types=" + Arrays.toString(types));
+		}
+
+		List<DetectedDbEntity> tables = new LinkedList<DetectedDbEntity>();
+		try (ResultSet rs = metaData.getTables(catalog, schema, WILDCARD, types);) {
+			while (rs.next()) {
+				// Oracle 9i and newer has a nifty recycle bin feature... but we don't
+				// want dropped tables to be included here; in fact they may even result
+				// in errors on reverse engineering as their names have special chars
+				// like "/", etc. So skip them altogether.
+
+				String name = rs.getString("TABLE_NAME");
+				if (name == null) {
+					continue;
+				}
+
+				DetectedDbEntity table = new DetectedDbEntity(name);
+
+				String catalog = rs.getString("TABLE_CAT");
+				table.setCatalog(catalog);
+
+				String schema = rs.getString("TABLE_SCHEM");
+				table.setSchema(schema);
+				if (!(this.catalog == null || this.catalog.equals(catalog))
+						|| !(this.schema == null || this.schema.equals(schema))) {
+
+					LOGGER.error(catalog + "." + schema + "." + name + " wrongly loaded for catalog/schema : "
+							+ this.catalog + "." + this.schema);
+
+					continue;
+				}
+
+				PatternFilter includeTable = filters.isIncludeTable(table.getName());
+				if (includeTable != null) {
+					tables.add(table);
+				}
+			}
+		}
+		return tables;
+	}
+
+	/**
+	 * Loads DbEntities for the tables matching the configured filters.
+	 *
+	 * @param config the reverse engineering configuration
+	 * @param types the table types to load
+	 */
+	public List<DbEntity> loadDbEntities(DataMap map, DbLoaderConfiguration config, String[] types) throws SQLException {
+		// list of db entities to process
+
+		List<DetectedDbEntity> tables = getDbEntities(config.getFiltersConfig().tableFilter(catalog, schema), types);
+
+		List<DbEntity> dbEntities = new ArrayList<DbEntity>();
+		for (DbEntity dbEntity : tables) {
+			DbEntity oldEnt = map.getDbEntity(dbEntity.getName());
+			if (oldEnt != null) {
+				Collection<ObjEntity> oldObjEnt = map.getMappedEntities(oldEnt);
+				if (!oldObjEnt.isEmpty()) {
+					for (ObjEntity objEntity : oldObjEnt) {
+						LOGGER.debug("Delete ObjEntity: " + objEntity.getName());
+						map.removeObjEntity(objEntity.getName(), true);
+						delegate.objEntityRemoved(objEntity);
+					}
+				}
+
+				LOGGER.debug("Overwrite DbEntity: " + oldEnt.getName());
+				map.removeDbEntity(oldEnt.getName(), true);
+				delegate.dbEntityRemoved(oldEnt);
+			}
+
+			map.addDbEntity(dbEntity);
+
+			delegate.dbEntityAdded(dbEntity);
+
+			// delegate might have thrown this entity out... so check if it is
+			// still around before continuing processing
+			if (map.getDbEntity(dbEntity.getName()) == dbEntity) {
+				dbEntities.add(dbEntity);
+				attributesLoader.loadDbAttributes(dbEntity);
+				if (!config.isSkipPrimaryKeyLoading()) {
+					loadPrimaryKey(dbEntity);
+				}
+			}
+		}
+
+		return dbEntities;
+	}
+
+	private void loadPrimaryKey(DbEntity dbEntity) throws SQLException {
+
+		try (ResultSet rs = metaData.getPrimaryKeys(dbEntity.getCatalog(), dbEntity.getSchema(), dbEntity.getName());) {
+			while (rs.next()) {
+				String columnName = rs.getString("COLUMN_NAME");
+				DbAttribute attribute = dbEntity.getAttribute(columnName);
+
+				if (attribute != null) {
+					attribute.setPrimaryKey(true);
+				} else {
+					// why an attribute might be null is not quite clear, but
+					// bug report 731406 indicates that it is possible,
+					// so just print the warning and ignore it
+					LOGGER.warn("Can't locate attribute for primary key: " + columnName);
+				}
+
+				String pkName = rs.getString("PK_NAME");
+				if (pkName != null && dbEntity instanceof DetectedDbEntity) {
+					((DetectedDbEntity) dbEntity).setPrimaryKeyName(pkName);
+				}
+
+			}
+		}
+	}
+}
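
DbTableLoader is normally wired up internally via DbLoader.createTableLoader(). A rough sketch of standalone use follows; DbAttributesPerSchemaLoader comes from another file in this commit, and the "APP" schema name plus the metaData/adapter parameters are assumed values for illustration:

import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.List;

import org.apache.cayenne.dba.DbAdapter;
import org.apache.cayenne.dbsync.reverse.DbAttributesPerSchemaLoader;
import org.apache.cayenne.dbsync.reverse.DbLoaderConfiguration;
import org.apache.cayenne.dbsync.reverse.DbTableLoader;
import org.apache.cayenne.dbsync.reverse.DefaultDbLoaderDelegate;
import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
import org.apache.cayenne.map.DataMap;
import org.apache.cayenne.map.DbEntity;

public class TableLoadingExample {

    public static List<DbEntity> loadTables(DatabaseMetaData metaData, DbAdapter adapter) throws SQLException {
        // keep the configuration consistent with the loader's catalog/schema
        DbLoaderConfiguration config = new DbLoaderConfiguration();
        config.setFiltersConfig(FiltersConfig.create(null, "APP",
                TableFilter.everything(), PatternFilter.INCLUDE_NOTHING));

        TableFilter filter = config.getFiltersConfig().tableFilter(null, "APP");
        DbTableLoader tableLoader = new DbTableLoader(null, "APP", metaData, new DefaultDbLoaderDelegate(),
                new DbAttributesPerSchemaLoader(null, "APP", metaData, adapter, filter));

        // loads bare DbEntities (with attributes and primary keys) into a fresh DataMap
        return tableLoader.loadDbEntities(new DataMap(), config, new String[] { "TABLE" });
    }
}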

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DefaultDbLoaderDelegate.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DefaultDbLoaderDelegate.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DefaultDbLoaderDelegate.java
new file mode 100644
index 0000000..b39fd5f
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/DefaultDbLoaderDelegate.java
@@ -0,0 +1,59 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DbRelationship;
+import org.apache.cayenne.map.ObjEntity;
+
+/**
+ * @since 4.0
+ */
+public class DefaultDbLoaderDelegate implements DbLoaderDelegate {
+
+    @Override
+    public void dbEntityAdded(DbEntity entity) {
+
+    }
+
+    @Override
+    public void dbEntityRemoved(DbEntity entity) {
+
+    }
+
+    @Override
+    public boolean dbRelationship(DbEntity entity) {
+        return true;
+    }
+
+    @Override
+    public boolean dbRelationshipLoaded(DbEntity entity, DbRelationship relationship) {
+        return true;
+    }
+
+    @Override
+    public void objEntityAdded(ObjEntity entity) {
+
+    }
+
+    @Override
+    public void objEntityRemoved(ObjEntity entity) {
+
+    }
+}
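
A hypothetical custom delegate would extend this no-op base and override only the callbacks it cares about; the "AUDIT_" table-name prefix below is just an example convention, not something defined by this commit:

import org.apache.cayenne.dbsync.reverse.DefaultDbLoaderDelegate;
import org.apache.cayenne.map.DbEntity;
import org.apache.cayenne.map.DbRelationship;

public class AuditAwareLoaderDelegate extends DefaultDbLoaderDelegate {

    @Override
    public boolean dbRelationship(DbEntity entity) {
        // do not load relationships originating from audit tables
        return !entity.getName().startsWith("AUDIT_");
    }

    @Override
    public boolean dbRelationshipLoaded(DbEntity entity, DbRelationship relationship) {
        // veto relationships that point at audit tables
        return !relationship.getTargetEntityName().startsWith("AUDIT_");
    }
}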

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/FiltersConfigBuilder.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/FiltersConfigBuilder.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/FiltersConfigBuilder.java
new file mode 100644
index 0000000..390d91a
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/FiltersConfigBuilder.java
@@ -0,0 +1,393 @@
+/*****************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.dbimport.Catalog;
+import org.apache.cayenne.dbimport.ExcludeColumn;
+import org.apache.cayenne.dbimport.ExcludeProcedure;
+import org.apache.cayenne.dbimport.ExcludeTable;
+import org.apache.cayenne.dbimport.IncludeColumn;
+import org.apache.cayenne.dbimport.IncludeProcedure;
+import org.apache.cayenne.dbimport.IncludeTable;
+import org.apache.cayenne.dbimport.PatternParam;
+import org.apache.cayenne.dbimport.ReverseEngineering;
+import org.apache.cayenne.dbimport.Schema;
+import org.apache.cayenne.dbsync.reverse.filters.LegacyFilterConfigBridge;
+import org.apache.cayenne.dbsync.reverse.filters.CatalogFilter;
+import org.apache.cayenne.dbsync.reverse.filters.IncludeTableFilter;
+import org.apache.cayenne.dbsync.reverse.filters.SchemaFilter;
+import org.apache.cayenne.dbsync.reverse.filters.TableFilter;
+import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
+import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
+
+import java.util.*;
+import java.util.regex.Pattern;
+
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+/**
+ * @since 4.0
+ */
+public final class FiltersConfigBuilder {
+
+    private final ReverseEngineering engineering;
+
+    public FiltersConfigBuilder(ReverseEngineering engineering) {
+        this.engineering = engineering;
+    }
+
+    public FiltersConfig filtersConfig() {
+        compact();
+
+        return new FiltersConfig(transformCatalogs(engineering.getCatalogs()));
+    }
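+
+    // Usage sketch: a caller (e.g. a dbimport task) can build the runtime filter
+    // config from a populated org.apache.cayenne.dbimport.ReverseEngineering
+    // descriptor along these lines ("reverseEngineering" is illustrative):
+    //
+    //     FiltersConfig filters = new FiltersConfigBuilder(reverseEngineering).filtersConfig();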
+
+    private CatalogFilter[] transformCatalogs(Collection<Catalog> catalogs) {
+        CatalogFilter[] catalogFilters = new CatalogFilter[catalogs.size()];
+        int i = 0;
+        for (Catalog catalog : catalogs) {
+            catalogFilters[i] = new CatalogFilter(catalog.getName(), transformSchemas(catalog.getSchemas()));
+            i++;
+        }
+
+        return catalogFilters;
+    }
+
+    private SchemaFilter[] transformSchemas(Collection<Schema> schemas) {
+        SchemaFilter[] schemaFilters = new SchemaFilter[schemas.size()];
+        int i = 0;
+        for (Schema schema : schemas) {
+            schemaFilters[i] = new SchemaFilter(schema.getName(),
+                    new TableFilter(transformIncludeTable(schema.getIncludeTables()),
+                            transformExcludeTable(schema.getExcludeTables())),
+                    transform(schema.getIncludeProcedures(), schema.getExcludeProcedures()));
+            i++;
+        }
+
+        return schemaFilters;
+    }
+
+    private SortedSet<Pattern> transformExcludeTable(Collection<ExcludeTable> excludeTables) {
+        SortedSet<Pattern> res = new TreeSet<Pattern>(PatternFilter.PATTERN_COMPARATOR);
+        for (ExcludeTable exclude : excludeTables) {
+            res.add(PatternFilter.pattern(exclude.getPattern()));
+        }
+        return res;
+    }
+
+    private SortedSet<IncludeTableFilter> transformIncludeTable(Collection<IncludeTable> includeTables) {
+        SortedSet<IncludeTableFilter> includeTableFilters = new TreeSet<IncludeTableFilter>();
+        for (IncludeTable includeTable : includeTables) {
+            includeTableFilters.add(new IncludeTableFilter(includeTable.getPattern(),
+                    transform(includeTable.getIncludeColumns(), includeTable.getExcludeColumns())));
+        }
+
+        return includeTableFilters;
+    }
+
+    private PatternFilter transform(Collection<? extends PatternParam> include,
+                                    Collection<? extends PatternParam> exclude) {
+        PatternFilter filter = new PatternFilter();
+
+        for (PatternParam patternParam : include) {
+            filter.include(patternParam.getPattern());
+        }
+
+        for (PatternParam patternParam : exclude) {
+            filter.exclude(patternParam.getPattern());
+        }
+
+        return filter;
+
+    }
+
+    /**
+     * The goal of this method is to transform the ReverseEngineering config into a more regular form.
+     * From
+     *      ReverseEngineering
+     *          Catalog
+     *              Schema
+     *                  IncludeTable
+     *                      IncludeColumn
+     *                      ExcludeColumn
+     *                  ExcludeTable
+     *                  IncludeProcedures
+     *                  ExcludeProcedures
+     *                  IncludeColumn
+     *                  ExcludeColumn
+     *              IncludeTable
+     *                  IncludeColumn
+     *                  ExcludeColumn
+     *              ExcludeTable
+     *              IncludeProcedures
+     *              ExcludeProcedures
+     *              IncludeColumn
+     *              ExcludeColumn
+     *          Schema
+     *              IncludeTable
+     *                  IncludeColumn
+     *                  ExcludeColumn
+     *              ExcludeTable
+     *              IncludeProcedures
+     *              ExcludeProcedures
+     *              IncludeColumn
+     *              ExcludeColumn
+     *          IncludeTable
+     *              IncludeColumn
+     *              ExcludeColumn
+     *          ExcludeTable
+     *          IncludeProcedures
+     *          ExcludeProcedures
+     *          IncludeColumn
+     *          ExcludeColumn
+     *
+     * Into
+     *      ReverseEngineering
+     *          Catalog
+     *              Schema
+     *                  IncludeTable
+     *                      IncludeColumn
+     *                      ExcludeColumn
+     *                  ExcludeTable
+     *                  IncludeProcedures
+     *                  ExcludeProcedures
+     *
+     *
+     * */
+    public void compact() {
+        addEmptyElements();
+
+        compactColumnFilters();
+        compactTableFilter();
+        compactProcedureFilter();
+        compactSchemas();
+    }
+
+    private void compactSchemas() {
+        for (Catalog catalog : engineering.getCatalogs()) {
+            catalog.getSchemas().addAll(engineering.getSchemas());
+        }
+        engineering.setSchemas(null);
+    }
+
+    private void compactProcedureFilter() {
+        Collection<IncludeProcedure> engIncludeProcedures = engineering.getIncludeProcedures();
+        Collection<ExcludeProcedure> engExcludeProcedures = engineering.getExcludeProcedures();
+
+        engineering.setIncludeProcedures(null);
+        engineering.setExcludeProcedures(null);
+
+        for (Catalog catalog : engineering.getCatalogs()) {
+            Collection<IncludeProcedure> catalogIncludeProcedures = catalog.getIncludeProcedures();
+            Collection<ExcludeProcedure> catalogExcludeProcedures = catalog.getExcludeProcedures();
+
+            catalog.setIncludeProcedures(null);
+            catalog.setExcludeProcedures(null);
+
+            for (Schema schema : catalog.getSchemas()) {
+                if (engIncludeProcedures != null) {
+                    schema.getIncludeProcedures().addAll(engIncludeProcedures);
+                    schema.getIncludeProcedures().addAll(catalogIncludeProcedures);
+                }
+                if (engExcludeProcedures != null) {
+                    schema.getExcludeProcedures().addAll(engExcludeProcedures);
+                    schema.getExcludeProcedures().addAll(catalogExcludeProcedures);
+                }
+            }
+        }
+
+        for (Schema schema : engineering.getSchemas()) {
+            schema.getIncludeProcedures().addAll(engIncludeProcedures);
+            schema.getExcludeProcedures().addAll(engExcludeProcedures);
+        }
+    }
+
+    private void compactTableFilter() {
+        Collection<IncludeTable> engIncludeTables = engineering.getIncludeTables();
+        Collection<ExcludeTable> engExcludeTables = engineering.getExcludeTables();
+
+        engineering.setIncludeTables(null);
+        engineering.setExcludeTables(null);
+
+        for (Catalog catalog : engineering.getCatalogs()) {
+            Collection<IncludeTable> catalogIncludeTables = catalog.getIncludeTables();
+            Collection<ExcludeTable> catalogExcludeTables = catalog.getExcludeTables();
+
+            catalog.setIncludeTables(null);
+            catalog.setExcludeTables(null);
+
+            for (Schema schema : catalog.getSchemas()) {
+                if (engIncludeTables != null) {
+                    schema.getIncludeTables().addAll(engIncludeTables);
+                    schema.getIncludeTables().addAll(catalogIncludeTables);
+                }
+                if (engExcludeTables != null) {
+                    schema.getExcludeTables().addAll(engExcludeTables);
+                    schema.getExcludeTables().addAll(catalogExcludeTables);
+                }
+            }
+        }
+
+        for (Schema schema : engineering.getSchemas()) {
+            schema.getIncludeTables().addAll(engIncludeTables);
+            schema.getExcludeTables().addAll(engExcludeTables);
+        }
+    }
+
+    private void compactColumnFilters() {
+        Collection<IncludeColumn> engIncludeColumns = engineering.getIncludeColumns();
+        Collection<ExcludeColumn> engExcludeColumns = engineering.getExcludeColumns();
+
+        engineering.setIncludeColumns(null);
+        engineering.setExcludeColumns(null);
+
+        for (Catalog catalog : engineering.getCatalogs()) {
+            Collection<IncludeColumn> catalogIncludeColumns = catalog.getIncludeColumns();
+            Collection<ExcludeColumn> catalogExcludeColumns = catalog.getExcludeColumns();
+
+            catalog.setIncludeColumns(null);
+            catalog.setExcludeColumns(null);
+
+            for (Schema schema : catalog.getSchemas()) {
+                Collection<IncludeColumn> schemaIncludeColumns = schema.getIncludeColumns();
+                Collection<ExcludeColumn> schemaExcludeColumns = schema.getExcludeColumns();
+
+                schema.setIncludeColumns(null);
+                schema.setExcludeColumns(null);
+
+                for (IncludeTable includeTable : schema.getIncludeTables()) {
+                    if (engIncludeColumns != null) {
+                        includeTable.getIncludeColumns().addAll(engIncludeColumns);
+                        includeTable.getIncludeColumns().addAll(catalogIncludeColumns);
+                        includeTable.getIncludeColumns().addAll(schemaIncludeColumns);
+                    }
+                    if (engExcludeColumns != null) {
+                        includeTable.getExcludeColumns().addAll(engExcludeColumns);
+                        includeTable.getExcludeColumns().addAll(catalogExcludeColumns);
+                        includeTable.getExcludeColumns().addAll(schemaExcludeColumns);
+                    }
+                }
+            }
+
+            if (catalog.getIncludeTables() != null) {
+                for (IncludeTable includeTable : catalog.getIncludeTables()) {
+                    includeTable.getIncludeColumns().addAll(engIncludeColumns);
+                    includeTable.getIncludeColumns().addAll(catalogIncludeColumns);
+
+                    includeTable.getExcludeColumns().addAll(engExcludeColumns);
+                    includeTable.getExcludeColumns().addAll(catalogExcludeColumns);
+                }
+            }
+        }
+
+        for (Schema schema : engineering.getSchemas()) {
+            Collection<IncludeColumn> schemaIncludeColumns = schema.getIncludeColumns();
+            Collection<ExcludeColumn> schemaExcludeColumns = schema.getExcludeColumns();
+
+            schema.setIncludeColumns(null);
+            schema.setExcludeColumns(null);
+
+            for (IncludeTable includeTable : schema.getIncludeTables()) {
+                includeTable.getIncludeColumns().addAll(engIncludeColumns);
+                includeTable.getIncludeColumns().addAll(schemaIncludeColumns);
+
+                includeTable.getExcludeColumns().addAll(engExcludeColumns);
+                includeTable.getExcludeColumns().addAll(schemaExcludeColumns);
+            }
+        }
+
+        if (engineering.getIncludeTables() != null) {
+            for (IncludeTable includeTable : engineering.getIncludeTables()) {
+                includeTable.getIncludeColumns().addAll(engIncludeColumns);
+                includeTable.getExcludeColumns().addAll(engExcludeColumns);
+            }
+        }
+    }
+
+    private void addEmptyElements() {
+        if (engineering.getCatalogs().isEmpty()) {
+            engineering.addCatalog(new Catalog());
+        }
+
+        for (Catalog catalog : engineering.getCatalogs()) {
+            if (catalog.getSchemas().isEmpty()
+                    && engineering.getSchemas().isEmpty()) {
+                catalog.addSchema(new Schema());
+            }
+
+            for (Schema schema : catalog.getSchemas()) {
+                if (schema.getIncludeTables().isEmpty()
+                        && catalog.getIncludeTables().isEmpty()
+                        && engineering.getIncludeTables().isEmpty()) {
+
+                    schema.addIncludeTable(new IncludeTable());
+                }
+            }
+        }
+
+        if (engineering.getSchemas() == null) {
+            engineering.setSchemas(new LinkedList<Schema>());
+        }
+
+        for (Schema schema : engineering.getSchemas()) {
+            if (schema.getIncludeTables().isEmpty()
+                    && engineering.getIncludeTables().isEmpty()) {
+
+                schema.addIncludeTable(new IncludeTable());
+            }
+        }
+    }
+
+    public FiltersConfigBuilder add(LegacyFilterConfigBridge build) {
+        if (!isBlank(build.catalog())) {
+            engineering.addCatalog(new Catalog(build.catalog()));
+        }
+
+        if (!isBlank(build.schema())) {
+            engineering.addSchema(new Schema(build.schema()));
+        }
+
+        if (!isBlank(build.getIncludeTableFilters())) {
+            engineering.addIncludeTable(new IncludeTable(build.getIncludeTableFilters()));
+        }
+        if (!isBlank(build.getExcludeTableFilters())) {
+            engineering.addExcludeTable(new ExcludeTable(build.getExcludeTableFilters()));
+        }
+
+        if (!isBlank(build.getIncludeColumnFilters())) {
+            engineering.addIncludeColumn(new IncludeColumn(build.getIncludeColumnFilters()));
+        }
+        if (!isBlank(build.getExcludeColumnFilters())) {
+            engineering.addExcludeColumn(new ExcludeColumn(build.getExcludeColumnFilters()));
+        }
+
+        if (build.isLoadProcedures()) {
+            if (!isBlank(build.getIncludeProceduresFilters())) {
+                engineering.addIncludeProcedure(new IncludeProcedure(build.getIncludeProceduresFilters()));
+            }
+            if (!isBlank(build.getExcludeProceduresFilters())) {
+                engineering.addExcludeProcedure(new ExcludeProcedure(build.getExcludeProceduresFilters()));
+            }
+        }
+
+        return this;
+    }
+}

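For context, a minimal sketch of what the compaction above is meant to achieve. The ReverseEngineering type of the "engineering" field and the entry point that triggers the compact* methods are assumptions based on the rest of this commit, not on this hunk:

    // top-level filters are "pushed down", so the resulting FiltersConfig only
    // needs to consult per-schema and per-table filters
    ReverseEngineering engineering = new ReverseEngineering();
    engineering.addCatalog(new Catalog("MY_CATALOG"));
    engineering.addIncludeTable(new IncludeTable("ARTIST*"));
    engineering.addExcludeColumn(new ExcludeColumn("*_SECRET"));

    // after compactTableFilter(), compactColumnFilters() and addEmptyElements():
    // - every schema under MY_CATALOG carries the ARTIST* include-table pattern
    // - every IncludeTable under those schemas carries the *_SECRET exclude-column pattern
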
http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/LoggingDbLoaderDelegate.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/LoggingDbLoaderDelegate.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/LoggingDbLoaderDelegate.java
new file mode 100644
index 0000000..3777ccc
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/LoggingDbLoaderDelegate.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DbRelationship;
+import org.apache.cayenne.map.ObjEntity;
+import org.apache.commons.logging.Log;
+
+/**
+ * @since 4.0
+ */
+public class LoggingDbLoaderDelegate extends DefaultDbLoaderDelegate {
+
+    private final Log logger;
+
+    public LoggingDbLoaderDelegate(Log logger) {
+        this.logger = logger;
+    }
+
+    @Override
+    public void dbEntityAdded(DbEntity entity) {
+        logger.info("  Table: " + entity.getFullyQualifiedName());
+    }
+
+    @Override
+    public void dbEntityRemoved(DbEntity entity) {
+        logger.info("  Table removed: " + entity.getFullyQualifiedName());
+    }
+
+    @Override
+    public boolean dbRelationship(DbEntity entity) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("    Relationships for " + entity.getFullyQualifiedName());
+        }
+
+        return true;
+    }
+
+    @Override
+    public boolean dbRelationshipLoaded(DbEntity entity, DbRelationship relationship) {
+        logger.info("    " + relationship);
+
+        return true;
+    }
+
+    @Override
+    public void objEntityAdded(ObjEntity entity) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("  Class: " + entity.getName());
+        }
+    }
+
+    @Override
+    public void objEntityRemoved(ObjEntity entity) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("  Class removed: " + entity.getName());
+        }
+    }
+}

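A hedged usage sketch: the delegate construction comes from this file, while the DbLoader constructor shape is an assumption (its actual signature is defined in the DbLoader hunk of this commit):

    Log logger = LogFactory.getLog(DbLoader.class);

    // assumed shape: DbLoader receives a connection, an adapter and a delegate that
    // gets callbacks as tables, relationships and classes are loaded or removed
    DbLoader loader = new DbLoader(connection, adapter, new LoggingDbLoaderDelegate(logger));
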
http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/ManyToManyCandidateEntity.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/ManyToManyCandidateEntity.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/ManyToManyCandidateEntity.java
new file mode 100644
index 0000000..9666e08
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/ManyToManyCandidateEntity.java
@@ -0,0 +1,142 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+import org.apache.cayenne.map.DbRelationship;
+import org.apache.cayenne.map.ObjEntity;
+import org.apache.cayenne.map.ObjRelationship;
+import org.apache.cayenne.map.naming.DefaultUniqueNameGenerator;
+import org.apache.cayenne.map.naming.ExportedKey;
+import org.apache.cayenne.map.naming.NameCheckers;
+import org.apache.cayenne.map.naming.ObjectNameGenerator;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Represents an ObjEntity that is a candidate for optimization: a join entity mapping
+ * a many-to-many table that can be replaced with flattened relationships.
+ */
+public class ManyToManyCandidateEntity {
+
+    private static final Log LOG = LogFactory.getLog(ManyToManyCandidateEntity.class);
+
+    private final ObjEntity joinEntity;
+
+    private final DbRelationship dbRel1;
+    private final DbRelationship dbRel2;
+
+    private final ObjEntity entity1;
+    private final ObjEntity entity2;
+
+    private final DbRelationship reverseRelationship1;
+    private final DbRelationship reverseRelationship2;
+
+    private ManyToManyCandidateEntity(ObjEntity entityValue, List<ObjRelationship> relationships) {
+        joinEntity = entityValue;
+
+        ObjRelationship rel1 = relationships.get(0);
+        ObjRelationship rel2 = relationships.get(1);
+
+        dbRel1 = rel1.getDbRelationships().get(0);
+        dbRel2 = rel2.getDbRelationships().get(0);
+
+        reverseRelationship1 = dbRel1.getReverseRelationship();
+        reverseRelationship2 = dbRel2.getReverseRelationship();
+
+        entity1 = rel1.getTargetEntity();
+        entity2 = rel2.getTargetEntity();
+    }
+
+    /**
+     * Checks whether the given join entity maps a many-to-many table.
+     *
+     * @return a candidate wrapper around the entity if it looks like a many-to-many
+     *         join table; null otherwise
+     */
+    public static ManyToManyCandidateEntity build(ObjEntity joinEntity) {
+        ArrayList<ObjRelationship> relationships = new ArrayList<ObjRelationship>(joinEntity.getRelationships());
+        if (relationships.size() != 2 || (relationships.get(0).getDbRelationships().isEmpty() || relationships.get(1).getDbRelationships().isEmpty())) {
+            return null;
+        }
+
+        ManyToManyCandidateEntity candidateEntity = new ManyToManyCandidateEntity(joinEntity, relationships);
+        if (candidateEntity.isManyToMany()) {
+            return candidateEntity;
+        }
+
+        return null;
+    }
+
+    private boolean isManyToMany() {
+        boolean hasNoAttributes = joinEntity.getAttributes().isEmpty();
+
+        return hasNoAttributes
+                && reverseRelationship1 != null && reverseRelationship1.isToDependentPK()
+                && reverseRelationship2 != null && reverseRelationship2.isToDependentPK()
+                && entity1 != null && entity2 != null;
+    }
+
+    private void addFlattenedRelationship(ObjectNameGenerator nameGenerator, ObjEntity srcEntity, ObjEntity dstEntity,
+                                          DbRelationship rel1, DbRelationship rel2) {
+
+        // the flattened relationship can't be built if either side has no join attributes
+        if (rel1.getSourceAttributes().isEmpty() || rel2.getTargetAttributes().isEmpty()) {
+            LOG.warn("Wrong call ManyToManyCandidateEntity.addFlattenedRelationship(... , " + srcEntity.getName()
+                    + ", " + dstEntity.getName() + ", ...)");
+
+            return;
+        }
+
+        ExportedKey key = new ExportedKey(
+                rel1.getSourceEntity().getName(),
+                rel1.getSourceAttributes().iterator().next().getName(),
+                null,
+                rel2.getTargetEntity().getName(),
+                rel2.getTargetAttributes().iterator().next().getName(),
+                null,
+                (short) 1);
+
+        ObjRelationship newRelationship = new ObjRelationship();
+        newRelationship.setName(DefaultUniqueNameGenerator.generate(NameCheckers.objRelationship, srcEntity,
+                nameGenerator.createDbRelationshipName(key, true)));
+
+        newRelationship.setSourceEntity(srcEntity);
+        newRelationship.setTargetEntityName(dstEntity);
+
+        newRelationship.addDbRelationship(rel1);
+        newRelationship.addDbRelationship(rel2);
+
+        srcEntity.addRelationship(newRelationship);
+    }
+
+    /**
+     * Creates direct flattened relationships between the two joined entities and removes
+     * the relationships pointing at the many-to-many join entity.
+     *
+     * @param nameGenerator generator used to name the new flattened relationships
+     */
+    public void optimizeRelationships(ObjectNameGenerator nameGenerator) {
+        entity1.removeRelationship(reverseRelationship1.getName());
+        entity2.removeRelationship(reverseRelationship2.getName());
+
+        addFlattenedRelationship(nameGenerator, entity1, entity2, reverseRelationship1, dbRel2);
+        addFlattenedRelationship(nameGenerator, entity2, entity1, reverseRelationship2, dbRel1);
+    }
+
+}

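A minimal sketch of the intended use, assuming the caller walks a freshly reverse-engineered DataMap; the loop and the LegacyNameGenerator instance are illustrative, not taken from this hunk:

    ObjectNameGenerator nameGenerator = new LegacyNameGenerator();

    for (ObjEntity entity : dataMap.getObjEntities()) {
        ManyToManyCandidateEntity candidate = ManyToManyCandidateEntity.build(entity);
        if (candidate != null) {
            // replace A -> join -> B with a pair of flattened A <-> B relationships
            candidate.optimizeRelationships(nameGenerator);
        }
    }
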
http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NameFilter.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NameFilter.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NameFilter.java
new file mode 100644
index 0000000..89b8330
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NameFilter.java
@@ -0,0 +1,27 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse;
+
+/**
+ * @since 4.0
+ */
+public interface NameFilter {
+
+    boolean isIncluded(String string);
+}

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NamePatternMatcher.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NamePatternMatcher.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NamePatternMatcher.java
new file mode 100644
index 0000000..cb9faad
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/NamePatternMatcher.java
@@ -0,0 +1,225 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+
+package org.apache.cayenne.dbsync.reverse;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import org.apache.cayenne.util.CayenneMapEntry;
+import org.apache.commons.logging.Log;
+
+/**
+ * Provides name pattern matching functionality.
+ * 
+ * @since 1.2
+ */
+public class NamePatternMatcher implements NameFilter {
+
+    private static final String[] EMPTY_ARRAY = new String[0];
+    private static final Pattern COMMA = Pattern.compile(",");
+
+    private final Pattern[] itemIncludeFilters;
+    private final Pattern[] itemExcludeFilters;
+
+    public static NamePatternMatcher build(Log logger, String includePattern, String excludePattern) {
+        return new NamePatternMatcher(createPatterns(logger, includePattern), createPatterns(logger, excludePattern));
+    }
+
+    public NamePatternMatcher(Pattern[] itemIncludeFilters, Pattern[] itemExcludeFilters) {
+        this.itemIncludeFilters = itemIncludeFilters;
+        this.itemExcludeFilters = itemExcludeFilters;
+    }
+
+    /**
+     * Applies the preconfigured include/exclude filters to the list, removing entities
+     * that do not pass them.
+     *
+     * @deprecated since 3.0; still used by AntDataPortDelegate, which itself should
+     *             probably be deprecated
+     */
+    @Deprecated
+    public List<?> filter(List<?> items) {
+        if (items == null || items.isEmpty()) {
+            return items;
+        }
+
+        if (itemIncludeFilters.length == 0 && itemExcludeFilters.length == 0) {
+            return items;
+        }
+
+        Iterator<?> it = items.iterator();
+        while (it.hasNext()) {
+            CayenneMapEntry entity = (CayenneMapEntry) it.next();
+
+            if (!passedIncludeFilter(entity.getName())) {
+                it.remove();
+                continue;
+            }
+
+            if (!passedExcludeFilter(entity.getName())) {
+                it.remove();
+            }
+        }
+
+        return items;
+    }
+
+    /**
+     * Returns an array of Patterns. Takes a comma-separated list of patterns, attempting
+     * to convert them to the java.util.regex.Pattern syntax. E.g.
+     * <p>
+     * <code>"billing_*,user?"</code> will become an array of two expressions:
+     * <p>
+     * <code>^billing_.*$</code><br>
+     * <code>^user.?$</code><br>
+     */
+    public static Pattern[] createPatterns(Log logger, String patternString) {
+        if (patternString == null) {
+            return new Pattern[0];
+        }
+        String[] patternStrings = tokenizePattern(patternString);
+        List<Pattern> patterns = new ArrayList<Pattern>(patternStrings.length);
+
+        for (String patternString1 : patternStrings) {
+
+            // test the pattern
+            try {
+                patterns.add(Pattern.compile(patternString1));
+            } catch (PatternSyntaxException e) {
+
+                if (logger != null) {
+                    logger.warn("Ignoring invalid pattern [" + patternString1 + "], reason: " + e.getMessage());
+                }
+            }
+        }
+
+        return patterns.toArray(new Pattern[patterns.size()]);
+    }
+
+    /**
+     * Takes a comma-separated list of wildcard patterns and converts each token to
+     * java.util.regex.Pattern syntax (validity is checked later in createPatterns). E.g.
+     * <p>
+     * <code>"billing_*,user?"</code> will become an array of two expressions:
+     * <p>
+     * <code>^billing_.*$</code><br>
+     * <code>^user.?$</code><br>
+     */
+    public static String[] tokenizePattern(String pattern) {
+        if (pattern == null || pattern.isEmpty()) {
+            return EMPTY_ARRAY;
+        }
+
+        String[] patterns = COMMA.split(pattern);
+        if (patterns.length == 0) {
+            return EMPTY_ARRAY;
+        }
+
+        for (int i = 0; i < patterns.length; i++) {
+            // convert * into regex syntax
+            // e.g. abc*x becomes ^abc.*x$
+            // or abc?x becomes ^abc.?x$
+            patterns[i] = "^" + patterns[i].replaceAll("[*?]", ".$0") + "$";
+        }
+
+        return patterns;
+    }
+
+    /**
+     * Returns true if a given object property satisfies the include/exclude patterns.
+     * 
+     * @since 3.0
+     */
+    @Override
+    public boolean isIncluded(String string) {
+        return passedIncludeFilter(string) && passedExcludeFilter(string);
+    }
+
+    /**
+     * Returns true if an object matches at least one of the "include" patterns, or if no
+     * "include" patterns are defined.
+     * 
+     * @since 3.0
+     */
+    private boolean passedIncludeFilter(String item) {
+        if (itemIncludeFilters.length == 0) {
+            return true;
+        }
+
+        for (Pattern itemIncludeFilter : itemIncludeFilters) {
+            if (itemIncludeFilter.matcher(item).find()) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Returns true if an object does not match any of the "exclude" patterns, or if no
+     * "exclude" patterns are defined.
+     * 
+     * @since 3.0
+     */
+    private boolean passedExcludeFilter(String item) {
+        if (itemExcludeFilters.length == 0) {
+            return true;
+        }
+
+        for (Pattern itemExcludeFilter : itemExcludeFilters) {
+            if (itemExcludeFilter.matcher(item).find()) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * Replaces every occurrence of the given wildcard substring in the pattern with the
+     * replacement string. Returns the pattern unchanged if the pattern or wildcard is null.
+     */
+    public static String replaceWildcardInStringWithString(
+            String wildcard,
+            String pattern,
+            String replacement) {
+
+        if (pattern == null || wildcard == null) {
+            return pattern;
+        }
+
+        StringBuilder buffer = new StringBuilder();
+        int lastPos = 0;
+        int wildCardPos = pattern.indexOf(wildcard);
+        while (wildCardPos != -1) {
+            if (lastPos != wildCardPos) {
+                buffer.append(pattern.substring(lastPos, wildCardPos));
+            }
+            buffer.append(replacement);
+            // move past the wildcard just replaced; "+=" would skip over part of the
+            // remaining pattern whenever the wildcard occurs more than once
+            lastPos = wildCardPos + wildcard.length();
+            wildCardPos = pattern.indexOf(wildcard, lastPos);
+        }
+
+        if (lastPos < pattern.length()) {
+            buffer.append(pattern.substring(lastPos));
+        }
+
+        return buffer.toString();
+    }
+}

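A short sketch of the matching behaviour documented above; the logger and the sample names are illustrative:

    NameFilter filter = NamePatternMatcher.build(logger, "billing_*,user?", "billing_audit");

    filter.isIncluded("billing_payment"); // true  - matches ^billing_.*$
    filter.isIncluded("users");           // true  - matches ^user.?$
    filter.isIncluded("billing_audit");   // false - rejected by the exclude pattern
    filter.isIncluded("artist");          // false - matches no include pattern
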
http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/CatalogFilter.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/CatalogFilter.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/CatalogFilter.java
new file mode 100644
index 0000000..46feb9d
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/CatalogFilter.java
@@ -0,0 +1,62 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+package org.apache.cayenne.dbsync.reverse.filters;
+
+import java.util.Arrays;
+
+/**
+ * @since 4.0
+ */
+public class CatalogFilter {
+    public final String name;
+    public final SchemaFilter[] schemas;
+
+    public CatalogFilter(String name, SchemaFilter... schemas) {
+        if (schemas == null || schemas.length == 0) {
+            throw new IllegalArgumentException("schemas(" + Arrays.toString(schemas) + ") can't be null or empty");
+        }
+
+        this.name = name;
+        this.schemas = schemas;
+    }
+
+    public SchemaFilter getSchema(String schema) {
+        for (SchemaFilter schemaFilter : schemas) {
+            if (schemaFilter.name == null || schemaFilter.name.equals(schema)) {
+                return schemaFilter;
+            }
+        }
+
+        return null;
+    }
+
+    @Override
+    public String toString() {
+        return toString(new StringBuilder(), "").toString();
+    }
+
+    public StringBuilder toString(StringBuilder res, String prefix) {
+        res.append(prefix).append("Catalog: ").append(name).append("\n");
+        for (SchemaFilter schema : schemas) {
+            schema.toString(res, prefix + "  ");
+        }
+
+        return res;
+    }
+}

http://git-wip-us.apache.org/repos/asf/cayenne/blob/2f7b1d53/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/FiltersConfig.java
----------------------------------------------------------------------
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/FiltersConfig.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/FiltersConfig.java
new file mode 100644
index 0000000..48a2e62
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/filters/FiltersConfig.java
@@ -0,0 +1,81 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+
+package org.apache.cayenne.dbsync.reverse.filters;
+
+import java.util.Arrays;
+
+/**
+ * @since 4.0
+ */
+public class FiltersConfig {
+
+    public final CatalogFilter[] catalogs;
+
+    public FiltersConfig(CatalogFilter ... catalogs) {
+        if (catalogs == null || catalogs.length == 0) {
+            throw new IllegalArgumentException("catalogs(" + Arrays.toString(catalogs) + ") can't be null or empty");
+        }
+
+        this.catalogs = catalogs;
+    }
+
+    public PatternFilter proceduresFilter(String catalog, String schema) {
+        return getSchemaFilter(catalog, schema).procedures;
+    }
+
+    public TableFilter tableFilter(String catalog, String schema) {
+        return getSchemaFilter(catalog, schema).tables;
+    }
+
+    protected SchemaFilter getSchemaFilter(String catalog, String schema) {
+        CatalogFilter catalogFilter = getCatalog(catalog);
+        if (catalogFilter == null) {
+            return null;
+        }
+
+        return catalogFilter.getSchema(schema);
+    }
+
+    protected CatalogFilter getCatalog(String catalog) {
+        for (CatalogFilter catalogFilter : catalogs) {
+            if (catalogFilter.name == null || catalogFilter.name.equals(catalog)) {
+                return catalogFilter;
+            }
+        }
+
+        return null;
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder builder = new StringBuilder();
+        for (CatalogFilter catalog : catalogs) {
+            catalog.toString(builder, "");
+        }
+
+        return builder.toString();
+    }
+
+    public static FiltersConfig create(String catalog, String schema, TableFilter tableFilter, PatternFilter procedures) {
+        return new FiltersConfig(
+                    new CatalogFilter(catalog,
+                        new SchemaFilter(schema, tableFilter, procedures)));
+    }
+}
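
A brief sketch of the lookup behaviour: a null catalog or schema name acts as a wildcard in getCatalog()/getSchema(). TableFilter.everything() and PatternFilter.INCLUDE_EVERYTHING are assumed helpers defined elsewhere in this commit, not in this hunk:

    // one catalog/schema pair with null names matches any catalog and any schema
    FiltersConfig config = FiltersConfig.create(null, null,
            TableFilter.everything(), PatternFilter.INCLUDE_EVERYTHING);

    TableFilter tables = config.tableFilter("ANY_CATALOG", "ANY_SCHEMA");
    PatternFilter procedures = config.proceduresFilter("ANY_CATALOG", "ANY_SCHEMA");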