You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metamodel.apache.org by ka...@apache.org on 2013/07/19 11:33:21 UTC

[38/61] [partial] Hard rename of all 'org/eobjects' folders to 'org/apache'.

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/MetaModelHelper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/MetaModelHelper.java b/core/src/main/java/org/eobjects/metamodel/MetaModelHelper.java
deleted file mode 100644
index a105973..0000000
--- a/core/src/main/java/org/eobjects/metamodel/MetaModelHelper.java
+++ /dev/null
@@ -1,775 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.eobjects.metamodel.data.CachingDataSetHeader;
-import org.eobjects.metamodel.data.DataSet;
-import org.eobjects.metamodel.data.DataSetHeader;
-import org.eobjects.metamodel.data.DefaultRow;
-import org.eobjects.metamodel.data.EmptyDataSet;
-import org.eobjects.metamodel.data.FilteredDataSet;
-import org.eobjects.metamodel.data.IRowFilter;
-import org.eobjects.metamodel.data.InMemoryDataSet;
-import org.eobjects.metamodel.data.Row;
-import org.eobjects.metamodel.data.SimpleDataSetHeader;
-import org.eobjects.metamodel.data.SubSelectionDataSet;
-import org.eobjects.metamodel.query.FilterItem;
-import org.eobjects.metamodel.query.FromItem;
-import org.eobjects.metamodel.query.GroupByItem;
-import org.eobjects.metamodel.query.OrderByItem;
-import org.eobjects.metamodel.query.Query;
-import org.eobjects.metamodel.query.SelectItem;
-import org.eobjects.metamodel.query.parser.QueryParser;
-import org.eobjects.metamodel.schema.Column;
-import org.eobjects.metamodel.schema.ColumnType;
-import org.eobjects.metamodel.schema.Schema;
-import org.eobjects.metamodel.schema.SuperColumnType;
-import org.eobjects.metamodel.schema.Table;
-import org.eobjects.metamodel.util.AggregateBuilder;
-import org.eobjects.metamodel.util.CollectionUtils;
-import org.eobjects.metamodel.util.EqualsBuilder;
-import org.eobjects.metamodel.util.Func;
-import org.eobjects.metamodel.util.ObjectComparator;
-import org.eobjects.metamodel.util.Predicate;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class contains various helper functionality to common tasks in
- * MetaModel, e.g.:
- * 
- * <ul>
- * <li>Easy-access for traversing common schema items</li>
- * <li>Manipulate data in memory. These methods are primarily used to enable
- * queries for non-queryable data sources like CSV files and spreadsheets.</li>
- * <li>Query rewriting, traversing and manipulation.</li>
- * </ul>
- * 
- * The class is mainly intended for internal use within the framework
- * operations, but is kept stable, so it can also be used by framework users.
- */
-public final class MetaModelHelper {
-
-    private final static Logger logger = LoggerFactory.getLogger(MetaModelHelper.class);
-
-    private MetaModelHelper() {
-        // Prevent instantiation
-    }
-
-    /**
-     * Creates an array of tables where all occurrences of tables in the provided
-     * list of tables and columns are included
-     */
-    public static Table[] getTables(Collection<Table> tableList, Iterable<Column> columnList) {
-        HashSet<Table> set = new HashSet<Table>();
-        set.addAll(tableList);
-        for (Column column : columnList) {
-            set.add(column.getTable());
-        }
-        return set.toArray(new Table[set.size()]);
-    }
-
-    /**
-     * Determines if a schema is an information schema
-     * 
-     * @param schema
-     * @return
-     */
-    public static boolean isInformationSchema(Schema schema) {
-        String name = schema.getName();
-        return isInformationSchema(name);
-    }
-
-    /**
-     * Determines if a schema name is the name of an information schema
-     * 
-     * @param name
-     * @return
-     */
-    public static boolean isInformationSchema(String name) {
-        if (name == null) {
-            return false;
-        }
-        return QueryPostprocessDataContext.INFORMATION_SCHEMA_NAME.equals(name.toLowerCase());
-    }
-
-    /**
-     * Converts a list of columns to a corresponding array of tables
-     * 
-     * @param columns
-     *            the columns that the tables will be extracted from
-     * @return an array containing the tables of the provided columns.
-     */
-    public static Table[] getTables(Iterable<Column> columns) {
-        ArrayList<Table> result = new ArrayList<Table>();
-        for (Column column : columns) {
-            Table table = column.getTable();
-            if (!result.contains(table)) {
-                result.add(table);
-            }
-        }
-        return result.toArray(new Table[result.size()]);
-    }
-
-    /**
-     * Creates a subset array of columns, where only columns that are contained
-     * within the specified table are included.
-     * 
-     * @param table
-     * @param columns
-     * @return an array containing the columns that exist in the table
-     */
-    public static Column[] getTableColumns(Table table, Iterable<Column> columns) {
-        if (table == null) {
-            return new Column[0];
-        }
-        final List<Column> result = new ArrayList<Column>();
-        for (Column column : columns) {
-            final boolean sameTable = table.equals(column.getTable());
-            if (sameTable) {
-                result.add(column);
-            }
-        }
-        return result.toArray(new Column[result.size()]);
-    }
-
-    /**
-     * Creates a subset array of columns, where only columns that are contained
-     * within the specified table are included.
-     * 
-     * @param table
-     * @param columns
-     * @return an array containing the columns that exist in the table
-     */
-    public static Column[] getTableColumns(Table table, Column[] columns) {
-        return getTableColumns(table, Arrays.asList(columns));
-    }
-
-    public static DataSet getCarthesianProduct(DataSet... fromDataSets) {
-        return getCarthesianProduct(fromDataSets, new FilterItem[0]);
-    }
-
-    public static DataSet getCarthesianProduct(DataSet[] fromDataSets, Iterable<FilterItem> whereItems) {
-        // First check if carthesian product is even necessary
-        if (fromDataSets.length == 1) {
-            return getFiltered(fromDataSets[0], whereItems);
-        }
-
-        List<SelectItem> selectItems = new ArrayList<SelectItem>();
-        for (DataSet dataSet : fromDataSets) {
-            for (int i = 0; i < dataSet.getSelectItems().length; i++) {
-                SelectItem item = dataSet.getSelectItems()[i];
-                selectItems.add(item);
-            }
-        }
-
-        int selectItemOffset = 0;
-        List<Object[]> data = new ArrayList<Object[]>();
-        for (int fromDataSetIndex = 0; fromDataSetIndex < fromDataSets.length; fromDataSetIndex++) {
-            DataSet fromDataSet = fromDataSets[fromDataSetIndex];
-            SelectItem[] fromSelectItems = fromDataSet.getSelectItems();
-            if (fromDataSetIndex == 0) {
-                while (fromDataSet.next()) {
-                    Object[] values = fromDataSet.getRow().getValues();
-                    Object[] row = new Object[selectItems.size()];
-                    System.arraycopy(values, 0, row, selectItemOffset, values.length);
-                    data.add(row);
-                }
-                fromDataSet.close();
-            } else {
-                List<Object[]> fromDataRows = new ArrayList<Object[]>();
-                while (fromDataSet.next()) {
-                    fromDataRows.add(fromDataSet.getRow().getValues());
-                }
-                fromDataSet.close();
-                for (int i = 0; i < data.size(); i = i + fromDataRows.size()) {
-                    Object[] originalRow = data.get(i);
-                    data.remove(i);
-                    for (int j = 0; j < fromDataRows.size(); j++) {
-                        Object[] newRow = fromDataRows.get(j);
-                        System.arraycopy(newRow, 0, originalRow, selectItemOffset, newRow.length);
-                        data.add(i + j, originalRow.clone());
-                    }
-                }
-            }
-            selectItemOffset += fromSelectItems.length;
-        }
-
-        if (data.isEmpty()) {
-            return new EmptyDataSet(selectItems);
-        }
-
-        final DataSetHeader header = new CachingDataSetHeader(selectItems);
-        final List<Row> rows = new ArrayList<Row>(data.size());
-        for (Object[] objects : data) {
-            rows.add(new DefaultRow(header, objects, null));
-        }
-
-        DataSet result = new InMemoryDataSet(header, rows);
-        if (whereItems != null) {
-            result = getFiltered(result, whereItems);
-        }
-        return result;
-    }
-
-    public static DataSet getCarthesianProduct(DataSet[] fromDataSets, FilterItem... filterItems) {
-        return getCarthesianProduct(fromDataSets, Arrays.asList(filterItems));
-    }
-
-    public static DataSet getFiltered(DataSet dataSet, Iterable<FilterItem> filterItems) {
-        List<IRowFilter> filters = CollectionUtils.map(filterItems, new Func<FilterItem, IRowFilter>() {
-            @Override
-            public IRowFilter eval(FilterItem filterItem) {
-                return filterItem;
-            }
-        });
-        if (filters.isEmpty()) {
-            return dataSet;
-        }
-
-        return new FilteredDataSet(dataSet, filters.toArray(new IRowFilter[filters.size()]));
-    }
-
-    public static DataSet getFiltered(DataSet dataSet, FilterItem... filterItems) {
-        return getFiltered(dataSet, Arrays.asList(filterItems));
-    }
-
-    public static DataSet getSelection(final List<SelectItem> selectItems, final DataSet dataSet) {
-        final SelectItem[] dataSetSelectItems = dataSet.getSelectItems();
-
-        // check if the selection is already the same
-        if (selectItems.size() == dataSetSelectItems.length) {
-            boolean same = true;
-            int i = 0;
-            for (SelectItem selectItem : selectItems) {
-                if (!EqualsBuilder.equals(selectItem, dataSetSelectItems[i])) {
-                    same = false;
-                    break;
-                }
-                i++;
-            }
-
-            if (same) {
-                // return the dataSet unmodified
-                return dataSet;
-            }
-        }
-
-        SelectItem[] selectItemsArray = selectItems.toArray(new SelectItem[selectItems.size()]);
-        return new SubSelectionDataSet(selectItemsArray, dataSet);
-    }
-
-    public static DataSet getSelection(SelectItem[] selectItems, DataSet dataSet) {
-        return getSelection(Arrays.asList(selectItems), dataSet);
-    }
-
-    public static DataSet getGrouped(List<SelectItem> selectItems, DataSet dataSet, Collection<GroupByItem> groupByItems) {
-        return getGrouped(selectItems, dataSet, groupByItems.toArray(new GroupByItem[groupByItems.size()]));
-    }
-
-    public static DataSet getGrouped(List<SelectItem> selectItems, DataSet dataSet, GroupByItem[] groupByItems) {
-        DataSet result = dataSet;
-        if (groupByItems != null && groupByItems.length > 0) {
-            Map<Row, Map<SelectItem, List<Object>>> uniqueRows = new HashMap<Row, Map<SelectItem, List<Object>>>();
-
-            final SelectItem[] groupBySelects = new SelectItem[groupByItems.length];
-            for (int i = 0; i < groupBySelects.length; i++) {
-                groupBySelects[i] = groupByItems[i].getSelectItem();
-            }
-            final DataSetHeader groupByHeader = new CachingDataSetHeader(groupBySelects);
-
-            // Creates a list of SelectItems that have functions
-            List<SelectItem> functionItems = getFunctionSelectItems(selectItems);
-
-            // Loop through the dataset and identify groups
-            while (dataSet.next()) {
-                Row row = dataSet.getRow();
-
-                // Subselect a row prototype with only the unique values that
-                // define the group
-                Row uniqueRow = row.getSubSelection(groupByHeader);
-
-                // function input is the values used for calculating aggregate
-                // functions in the group
-                Map<SelectItem, List<Object>> functionInput;
-                if (!uniqueRows.containsKey(uniqueRow)) {
-                    // If this is a new group, create a fresh function
-                    // input
-                    functionInput = new HashMap<SelectItem, List<Object>>();
-                    for (SelectItem item : functionItems) {
-                        functionInput.put(item, new ArrayList<Object>());
-                    }
-                    uniqueRows.put(uniqueRow, functionInput);
-                } else {
-                    // If this group already exists, reuse its existing function input
-                    functionInput = uniqueRows.get(uniqueRow);
-                }
-
-                // Loop through aggregate functions to check for validity
-                for (SelectItem item : functionItems) {
-                    List<Object> objects = functionInput.get(item);
-                    Column column = item.getColumn();
-                    if (column != null) {
-                        Object value = row.getValue(new SelectItem(column));
-                        objects.add(value);
-                    } else if (SelectItem.isCountAllItem(item)) {
-                        // Just use the empty string, since COUNT(*) doesn't
-                        // evaluate values (but null values should be prevented)
-                        objects.add("");
-                    } else {
-                        throw new IllegalArgumentException("Expression function not supported: " + item);
-                    }
-                }
-            }
-
-            dataSet.close();
-            final List<Row> resultData = new ArrayList<Row>();
-            final DataSetHeader resultHeader = new CachingDataSetHeader(selectItems);
-
-            // Loop through the groups to generate aggregates
-            for (Entry<Row, Map<SelectItem, List<Object>>> entry : uniqueRows.entrySet()) {
-                Row row = entry.getKey();
-                Map<SelectItem, List<Object>> functionInput = entry.getValue();
-                Object[] resultRow = new Object[selectItems.size()];
-                // Loop through select items to generate a row
-                int i = 0;
-                for (SelectItem item : selectItems) {
-                    int uniqueRowIndex = row.indexOf(item);
-                    if (uniqueRowIndex != -1) {
-                        // If there's already a value for the select item in the
-                        // row, keep it (it's one of the grouped by columns)
-                        resultRow[i] = row.getValue(uniqueRowIndex);
-                    } else {
-                        // Use the function input to calculate the aggregate
-                        // value
-                        List<Object> objects = functionInput.get(item);
-                        if (objects != null) {
-                            Object functionResult = item.getFunction().evaluate(objects.toArray());
-                            resultRow[i] = functionResult;
-                        } else {
-                            if (item.getFunction() != null) {
-                                logger.error("No function input found for SelectItem: {}", item);
-                            }
-                        }
-                    }
-                    i++;
-                }
-                resultData.add(new DefaultRow(resultHeader, resultRow, null));
-            }
-
-            if (resultData.isEmpty()) {
-                result = new EmptyDataSet(selectItems);
-            } else {
-                result = new InMemoryDataSet(resultHeader, resultData);
-            }
-        }
-        result = getSelection(selectItems, result);
-        return result;
-    }
-
-    /**
-     * Applies aggregate values to a dataset. This method is to be invoked AFTER
-     * any filters have been applied.
-     * 
-     * @param workSelectItems
-     *            all select items included in the processing of the query
-     *            (including those originating from other clauses than the
-     *            SELECT clause).
-     * @param dataSet
-     * @return
-     */
-    public static DataSet getAggregated(List<SelectItem> workSelectItems, DataSet dataSet) {
-        final List<SelectItem> functionItems = getFunctionSelectItems(workSelectItems);
-        if (functionItems.isEmpty()) {
-            return dataSet;
-        }
-
-        final Map<SelectItem, AggregateBuilder<?>> aggregateBuilders = new HashMap<SelectItem, AggregateBuilder<?>>();
-        for (SelectItem item : functionItems) {
-            aggregateBuilders.put(item, item.getFunction().build());
-        }
-
-        final DataSetHeader header;
-        final boolean onlyAggregates;
-        if (functionItems.size() != workSelectItems.size()) {
-            onlyAggregates = false;
-            header = new CachingDataSetHeader(workSelectItems);
-        } else {
-            onlyAggregates = true;
-            header = new SimpleDataSetHeader(workSelectItems);
-        }
-
-        final List<Row> resultRows = new ArrayList<Row>();
-        while (dataSet.next()) {
-            final Row inputRow = dataSet.getRow();
-            for (SelectItem item : functionItems) {
-                final AggregateBuilder<?> aggregateBuilder = aggregateBuilders.get(item);
-                final Column column = item.getColumn();
-                if (column != null) {
-                    Object value = inputRow.getValue(new SelectItem(column));
-                    aggregateBuilder.add(value);
-                } else if (SelectItem.isCountAllItem(item)) {
-                    // Just use the empty string, since COUNT(*) doesn't
-                    // evaluate values (but null values should be prevented)
-                    aggregateBuilder.add("");
-                } else {
-                    throw new IllegalArgumentException("Expression function not supported: " + item);
-                }
-            }
-
-            // If the result should also contain non-aggregated values, we
-            // will keep those in the rows list
-            if (!onlyAggregates) {
-                final Object[] values = new Object[header.size()];
-                for (int i = 0; i < header.size(); i++) {
-                    final Object value = inputRow.getValue(header.getSelectItem(i));
-                    if (value != null) {
-                        values[i] = value;
-                    }
-                }
-                resultRows.add(new DefaultRow(header, values));
-            }
-        }
-        dataSet.close();
-
-        // Collect the aggregates
-        Map<SelectItem, Object> functionResult = new HashMap<SelectItem, Object>();
-        for (SelectItem item : functionItems) {
-            AggregateBuilder<?> aggregateBuilder = aggregateBuilders.get(item);
-            Object result = aggregateBuilder.getAggregate();
-            functionResult.put(item, result);
-        }
-
-        // if there are no result rows (no matching records at all), we still
-        // need to return a record with the aggregates
-        final boolean noResultRows = resultRows.isEmpty();
-
-        if (onlyAggregates || noResultRows) {
-            // We will only create a single row with all the aggregates
-            Object[] values = new Object[header.size()];
-            for (int i = 0; i < header.size(); i++) {
-                values[i] = functionResult.get(header.getSelectItem(i));
-            }
-            Row row = new DefaultRow(header, values);
-            resultRows.add(row);
-        } else {
-            // We will create the aggregates as well as regular values
-            for (int i = 0; i < resultRows.size(); i++) {
-                Row row = resultRows.get(i);
-                Object[] values = row.getValues();
-                for (Entry<SelectItem, Object> entry : functionResult.entrySet()) {
-                    SelectItem item = entry.getKey();
-                    int itemIndex = row.indexOf(item);
-                    if (itemIndex != -1) {
-                        Object value = entry.getValue();
-                        values[itemIndex] = value;
-                    }
-                }
-                resultRows.set(i, new DefaultRow(header, values));
-            }
-        }
-
-        return new InMemoryDataSet(header, resultRows);
-    }
-
-    public static List<SelectItem> getFunctionSelectItems(Iterable<SelectItem> selectItems) {
-        return CollectionUtils.filter(selectItems, new Predicate<SelectItem>() {
-            @Override
-            public Boolean eval(SelectItem arg) {
-                return arg.getFunction() != null;
-            }
-        });
-    }
-
-    public static DataSet getOrdered(DataSet dataSet, List<OrderByItem> orderByItems) {
-        return getOrdered(dataSet, orderByItems.toArray(new OrderByItem[orderByItems.size()]));
-    }
-
-    public static DataSet getOrdered(DataSet dataSet, final OrderByItem... orderByItems) {
-        if (orderByItems != null && orderByItems.length != 0) {
-            final int[] sortIndexes = new int[orderByItems.length];
-            for (int i = 0; i < orderByItems.length; i++) {
-                OrderByItem item = orderByItems[i];
-                int indexOf = dataSet.indexOf(item.getSelectItem());
-                sortIndexes[i] = indexOf;
-            }
-
-            final List<Row> data = readDataSetFull(dataSet);
-            if (data.isEmpty()) {
-                return new EmptyDataSet(dataSet.getSelectItems());
-            }
-
-            final Comparator<Object> valueComparator = ObjectComparator.getComparator();
-
-            // create a comparator for doing the actual sorting/ordering
-            final Comparator<Row> comparator = new Comparator<Row>() {
-                public int compare(Row o1, Row o2) {
-                    for (int i = 0; i < sortIndexes.length; i++) {
-                        int sortIndex = sortIndexes[i];
-                        Object sortObj1 = o1.getValue(sortIndex);
-                        Object sortObj2 = o2.getValue(sortIndex);
-                        int compare = valueComparator.compare(sortObj1, sortObj2);
-                        if (compare != 0) {
-                            OrderByItem orderByItem = orderByItems[i];
-                            boolean ascending = orderByItem.isAscending();
-                            if (ascending) {
-                                return compare;
-                            } else {
-                                return compare * -1;
-                            }
-                        }
-                    }
-                    return 0;
-                }
-            };
-
-            Collections.sort(data, comparator);
-
-            dataSet = new InMemoryDataSet(data);
-        }
-        return dataSet;
-    }
-
-    public static List<Row> readDataSetFull(DataSet dataSet) {
-        final List<Row> result;
-        if (dataSet instanceof InMemoryDataSet) {
-            // if dataset is an in memory dataset we have a shortcut to avoid
-            // creating a new list
-            result = ((InMemoryDataSet) dataSet).getRows();
-        } else {
-            result = new ArrayList<Row>();
-            while (dataSet.next()) {
-                result.add(dataSet.getRow());
-            }
-        }
-        dataSet.close();
-        return result;
-    }
-
-    /**
-     * Examines a query and extracts an array of FromItem's that refer
-     * (directly) to tables (hence Joined FromItems and SubQuery FromItems are
-     * traversed but not included).
-     * 
-     * @param q
-     *            the query to examine
-     * @return an array of FromItem's that refer directly to tables
-     */
-    public static FromItem[] getTableFromItems(Query q) {
-        List<FromItem> result = new ArrayList<FromItem>();
-        List<FromItem> items = q.getFromClause().getItems();
-        for (FromItem item : items) {
-            result.addAll(getTableFromItems(item));
-        }
-        return result.toArray(new FromItem[result.size()]);
-    }
-
-    public static List<FromItem> getTableFromItems(FromItem item) {
-        List<FromItem> result = new ArrayList<FromItem>();
-        if (item.getTable() != null) {
-            result.add(item);
-        } else if (item.getSubQuery() != null) {
-            FromItem[] sqItems = getTableFromItems(item.getSubQuery());
-            for (int i = 0; i < sqItems.length; i++) {
-                result.add(sqItems[i]);
-            }
-        } else if (item.getJoin() != null) {
-            FromItem leftSide = item.getLeftSide();
-            result.addAll(getTableFromItems(leftSide));
-            FromItem rightSide = item.getRightSide();
-            result.addAll(getTableFromItems(rightSide));
-        } else {
-            throw new IllegalStateException("FromItem was neither of Table type, SubQuery type or Join type: " + item);
-        }
-        return result;
-    }
-
-    /**
-     * Executes a single row query, like "SELECT COUNT(*), MAX(SOME_COLUMN) FROM
-     * MY_TABLE" or similar.
-     * 
-     * @param dataContext
-     *            the DataContext object to use for executing the query
-     * @param query
-     *            the query to execute
-     * @return a row object representing the single row returned from the query
-     * @throws MetaModelException
-     *             if less or more than one Row is returned from the query
-     */
-    public static Row executeSingleRowQuery(DataContext dataContext, Query query) throws MetaModelException {
-        DataSet dataSet = dataContext.executeQuery(query);
-        boolean next = dataSet.next();
-        if (!next) {
-            throw new MetaModelException("No rows returned from query: " + query);
-        }
-        Row row = dataSet.getRow();
-        next = dataSet.next();
-        if (next) {
-            throw new MetaModelException("More than one row returned from query: " + query);
-        }
-        dataSet.close();
-        return row;
-    }
-
-    /**
-     * Performs a left join (aka left outer join) operation on two datasets.
-     * 
-     * @param ds1
-     *            the left dataset
-     * @param ds2
-     *            the right dataset
-     * @param onConditions
-     *            the conditions to join by
-     * @return the left joined result dataset
-     */
-    public static DataSet getLeftJoin(DataSet ds1, DataSet ds2, FilterItem[] onConditions) {
-        if (ds1 == null) {
-            throw new IllegalArgumentException("Left DataSet cannot be null");
-        }
-        if (ds2 == null) {
-            throw new IllegalArgumentException("Right DataSet cannot be null");
-        }
-        SelectItem[] si1 = ds1.getSelectItems();
-        SelectItem[] si2 = ds2.getSelectItems();
-        SelectItem[] selectItems = new SelectItem[si1.length + si2.length];
-        System.arraycopy(si1, 0, selectItems, 0, si1.length);
-        System.arraycopy(si2, 0, selectItems, si1.length, si2.length);
-
-        List<Row> resultRows = new ArrayList<Row>();
-        List<Row> ds2data = readDataSetFull(ds2);
-        if (ds2data.isEmpty()) {
-            // no need to join, simply return a new view (with null values) on
-            // the previous dataset.
-            return getSelection(selectItems, ds1);
-        }
-
-        final DataSetHeader header = new CachingDataSetHeader(selectItems);
-
-        while (ds1.next()) {
-
-            // Construct a single-row dataset for making a carthesian product
-            // against ds2
-            Row ds1row = ds1.getRow();
-            List<Row> ds1rows = new ArrayList<Row>();
-            ds1rows.add(ds1row);
-
-            DataSet carthesianProduct = getCarthesianProduct(new DataSet[] {
-                    new InMemoryDataSet(new CachingDataSetHeader(si1), ds1rows),
-                    new InMemoryDataSet(new CachingDataSetHeader(si2), ds2data) }, onConditions);
-            List<Row> carthesianRows = readDataSetFull(carthesianProduct);
-            if (carthesianRows.size() > 0) {
-                resultRows.addAll(carthesianRows);
-            } else {
-                Object[] values = ds1row.getValues();
-                Object[] row = new Object[selectItems.length];
-                System.arraycopy(values, 0, row, 0, values.length);
-                resultRows.add(new DefaultRow(header, row));
-            }
-        }
-        ds1.close();
-
-        if (resultRows.isEmpty()) {
-            return new EmptyDataSet(selectItems);
-        }
-
-        return new InMemoryDataSet(header, resultRows);
-    }
-
-    /**
-     * Performs a right join (aka right outer join) operation on two datasets.
-     * 
-     * @param ds1
-     *            the left dataset
-     * @param ds2
-     *            the right dataset
-     * @param onConditions
-     *            the conditions to join by
-     * @return the right joined result dataset
-     */
-    public static DataSet getRightJoin(DataSet ds1, DataSet ds2, FilterItem[] onConditions) {
-        SelectItem[] ds1selects = ds1.getSelectItems();
-        SelectItem[] ds2selects = ds2.getSelectItems();
-        SelectItem[] leftOrderedSelects = new SelectItem[ds1selects.length + ds2selects.length];
-        System.arraycopy(ds1selects, 0, leftOrderedSelects, 0, ds1selects.length);
-        System.arraycopy(ds2selects, 0, leftOrderedSelects, ds1selects.length, ds2selects.length);
-
-        // We will reuse the left join algorithm (but switch the datasets
-        // around)
-        DataSet dataSet = getLeftJoin(ds2, ds1, onConditions);
-
-        dataSet = getSelection(leftOrderedSelects, dataSet);
-        return dataSet;
-    }
-
-    public static SelectItem[] createSelectItems(Column... columns) {
-        SelectItem[] items = new SelectItem[columns.length];
-        for (int i = 0; i < items.length; i++) {
-            items[i] = new SelectItem(columns[i]);
-        }
-        return items;
-    }
-
-    public static DataSet getDistinct(DataSet dataSet) {
-        SelectItem[] selectItems = dataSet.getSelectItems();
-        GroupByItem[] groupByItems = new GroupByItem[selectItems.length];
-        for (int i = 0; i < groupByItems.length; i++) {
-            groupByItems[i] = new GroupByItem(selectItems[i]);
-        }
-        return getGrouped(Arrays.asList(selectItems), dataSet, groupByItems);
-    }
-
-    public static Table[] getTables(Column[] columns) {
-        return getTables(Arrays.asList(columns));
-    }
-
-    public static Column[] getColumnsByType(Column[] columns, final ColumnType columnType) {
-        return CollectionUtils.filter(columns, new Predicate<Column>() {
-            @Override
-            public Boolean eval(Column column) {
-                return column.getType() == columnType;
-            }
-        }).toArray(new Column[0]);
-    }
-
-    public static Column[] getColumnsBySuperType(Column[] columns, final SuperColumnType superColumnType) {
-        return CollectionUtils.filter(columns, new Predicate<Column>() {
-            @Override
-            public Boolean eval(Column column) {
-                return column.getType().getSuperType() == superColumnType;
-            }
-        }).toArray(new Column[0]);
-    }
-
-    public static Query parseQuery(DataContext dc, String queryString) {
-        final QueryParser parser = new QueryParser(dc, queryString);
-        return parser.parse();
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDataContext.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDataContext.java b/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDataContext.java
deleted file mode 100644
index d84da6a..0000000
--- a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDataContext.java
+++ /dev/null
@@ -1,589 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.eobjects.metamodel.convert.ConvertedDataSetInterceptor;
-import org.eobjects.metamodel.convert.Converters;
-import org.eobjects.metamodel.convert.HasReadTypeConverters;
-import org.eobjects.metamodel.convert.TypeConverter;
-import org.eobjects.metamodel.data.DataSet;
-import org.eobjects.metamodel.data.DataSetHeader;
-import org.eobjects.metamodel.data.DefaultRow;
-import org.eobjects.metamodel.data.EmptyDataSet;
-import org.eobjects.metamodel.data.FirstRowDataSet;
-import org.eobjects.metamodel.data.InMemoryDataSet;
-import org.eobjects.metamodel.data.MaxRowsDataSet;
-import org.eobjects.metamodel.data.Row;
-import org.eobjects.metamodel.data.SimpleDataSetHeader;
-import org.eobjects.metamodel.query.FilterItem;
-import org.eobjects.metamodel.query.FromItem;
-import org.eobjects.metamodel.query.GroupByItem;
-import org.eobjects.metamodel.query.JoinType;
-import org.eobjects.metamodel.query.OperatorType;
-import org.eobjects.metamodel.query.OrderByItem;
-import org.eobjects.metamodel.query.Query;
-import org.eobjects.metamodel.query.SelectItem;
-import org.eobjects.metamodel.schema.Column;
-import org.eobjects.metamodel.schema.ColumnType;
-import org.eobjects.metamodel.schema.MutableColumn;
-import org.eobjects.metamodel.schema.MutableRelationship;
-import org.eobjects.metamodel.schema.MutableSchema;
-import org.eobjects.metamodel.schema.MutableTable;
-import org.eobjects.metamodel.schema.Relationship;
-import org.eobjects.metamodel.schema.Schema;
-import org.eobjects.metamodel.schema.Table;
-import org.eobjects.metamodel.schema.TableType;
-import org.eobjects.metamodel.util.CollectionUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Abstract DataContext for data sources that do not support SQL queries
- * natively.
- * 
- * Instead this superclass only requires that a subclass can materialize a
- * single table at a time. Then the query will be executed by post processing
- * the datasets client-side.
- */
-public abstract class QueryPostprocessDataContext extends AbstractDataContext implements HasReadTypeConverters {
-
-    private static final Logger logger = LoggerFactory.getLogger(QueryPostprocessDataContext.class);
-
-    public static final String INFORMATION_SCHEMA_NAME = "information_schema";
-
-    private final Map<Column, TypeConverter<?, ?>> _converters;
-
-    private Schema _mainSchema;
-
-    public QueryPostprocessDataContext() {
-        super();
-        _converters = new HashMap<Column, TypeConverter<?, ?>>();
-    }
-
-    @Override
-    public DataSet executeQuery(final Query query) {
-        final List<SelectItem> selectItems = query.getSelectClause().getItems();
-        final List<FromItem> fromItems = query.getFromClause().getItems();
-        final List<FilterItem> whereItems = query.getWhereClause().getItems();
-        final List<SelectItem> whereSelectItems = query.getWhereClause().getEvaluatedSelectItems();
-        final List<GroupByItem> groupByItems = query.getGroupByClause().getItems();
-        final List<SelectItem> groupBySelectItems = query.getGroupByClause().getEvaluatedSelectItems();
-        final List<SelectItem> havingSelectItems = query.getHavingClause().getEvaluatedSelectItems();
-        final List<SelectItem> orderBySelectItems = query.getOrderByClause().getEvaluatedSelectItems();
-
-        final List<FilterItem> havingItems = query.getHavingClause().getItems();
-        final List<OrderByItem> orderByItems = query.getOrderByClause().getItems();
-
-        // check for approximate SELECT COUNT(*) queries
-        if (fromItems.size() == 1 && selectItems.size() == 1 && groupByItems.isEmpty() && havingItems.isEmpty()) {
-            final SelectItem selectItem = query.getSelectClause().getItem(0);
-            if (SelectItem.isCountAllItem(selectItem)) {
-                final boolean functionApproximationAllowed = selectItem.isFunctionApproximationAllowed();
-                final FromItem fromItem = query.getFromClause().getItem(0);
-                final Table table = fromItem.getTable();
-                if (table != null) {
-                    if (isMainSchemaTable(table)) {
-                        logger.debug("Query is a COUNT query with {} where items. Trying executeCountQuery(...)",
-                                whereItems.size());
-                        final Number count = executeCountQuery(table, whereItems, functionApproximationAllowed);
-                        if (count == null) {
-                            logger.debug("DataContext did not return any count query results. Proceeding with manual counting.");
-                        } else {
-                            List<Row> data = new ArrayList<Row>(1);
-                            final DataSetHeader header = new SimpleDataSetHeader(new SelectItem[] { selectItem });
-                            data.add(new DefaultRow(header, new Object[] { count }));
-                            return new InMemoryDataSet(header, data);
-                        }
-                    }
-                }
-            }
-        }
-
-        final int firstRow = (query.getFirstRow() == null ? 1 : query.getFirstRow());
-        final int maxRows = (query.getMaxRows() == null ? -1 : query.getMaxRows());
-
-        // Check for very simple queries with max rows property set (typically
-        // preview), see Ticket #187
-        previewTable: if (whereItems.isEmpty() && groupByItems.isEmpty() && havingItems.isEmpty()
-                && orderByItems.isEmpty() && fromItems.size() == 1) {
-
-            final Table table = fromItems.get(0).getTable();
-            if (table != null) {
-                for (SelectItem item : selectItems) {
-                    if (item.getFunction() != null || item.getExpression() != null) {
-                        break previewTable;
-                    }
-                }
-
-                DataSet dataSet = materializeTable(table, selectItems, firstRow, maxRows);
-                dataSet = MetaModelHelper.getSelection(selectItems, dataSet);
-                return dataSet;
-            }
-        }
-
-        // Creates a list for all select items that are needed to execute query
-        // (some may only be used as part of a filter, but not shown in result)
-        List<SelectItem> workSelectItems = CollectionUtils.concat(true, selectItems, whereSelectItems,
-                groupBySelectItems, havingSelectItems, orderBySelectItems);
-
-        // Materialize the tables in the from clause
-        final DataSet[] fromDataSets = new DataSet[fromItems.size()];
-        for (int i = 0; i < fromDataSets.length; i++) {
-            FromItem fromItem = fromItems.get(i);
-            fromDataSets[i] = materializeFromItem(fromItem, workSelectItems);
-        }
-
-        // Execute the query using the raw data
-        DataSet dataSet = MetaModelHelper.getCarthesianProduct(fromDataSets, whereItems);
-
-        // we can now exclude the select items imposed by the WHERE clause (and
-        // should, to make the aggregation process faster)
-        workSelectItems = CollectionUtils.concat(true, selectItems, groupBySelectItems, havingSelectItems,
-                orderBySelectItems);
-
-        if (groupByItems.size() > 0) {
-            dataSet = MetaModelHelper.getGrouped(workSelectItems, dataSet, groupByItems);
-        } else {
-            dataSet = MetaModelHelper.getAggregated(workSelectItems, dataSet);
-        }
-        dataSet = MetaModelHelper.getFiltered(dataSet, havingItems);
-
-        if (query.getSelectClause().isDistinct()) {
-            dataSet = MetaModelHelper.getSelection(selectItems, dataSet);
-            dataSet = MetaModelHelper.getDistinct(dataSet);
-            dataSet = MetaModelHelper.getOrdered(dataSet, orderByItems);
-        } else {
-            dataSet = MetaModelHelper.getOrdered(dataSet, orderByItems);
-            dataSet = MetaModelHelper.getSelection(selectItems, dataSet);
-        }
-
-        if (firstRow > 1) {
-            dataSet = new FirstRowDataSet(dataSet, firstRow);
-        }
-        if (maxRows != -1) {
-            dataSet = new MaxRowsDataSet(dataSet, maxRows);
-        }
-        return dataSet;
-    }
-
-    /**
-     * Executes a simple count query, if possible. This method is provided to
-     * allow subclasses to optimize count queries since they are quite common
-     * and often a datastore can retrieve the count using some specialized means
-     * which is much more performant than counting all records manually.
-     * 
-     * @param table
-     *            the table on which the count is requested.
-     * @param whereItems
-     *            a (sometimes empty) list of WHERE items.
-     * @param functionApproximationAllowed
-     *            whether approximation is allowed or not.
-     * @return the count of the particular table, or null if not available.
-     */
-    protected Number executeCountQuery(Table table, List<FilterItem> whereItems, boolean functionApproximationAllowed) {
-        return null;
-    }
-
-    protected DataSet materializeFromItem(final FromItem fromItem, final List<SelectItem> selectItems) {
-        DataSet dataSet;
-        JoinType joinType = fromItem.getJoin();
-        if (fromItem.getTable() != null) {
-            // We need to materialize a single table
-            final Table table = fromItem.getTable();
-            final List<SelectItem> selectItemsToMaterialize = new ArrayList<SelectItem>();
-
-            for (final SelectItem selectItem : selectItems) {
-                final FromItem selectedFromItem = selectItem.getFromItem();
-                if (selectedFromItem != null) {
-                    if (selectedFromItem.equals(fromItem)) {
-                        selectItemsToMaterialize.add(selectItem.replaceFunction(null));
-                    }
-                } else {
-                    // the select item does not specify a specific
-                    // from-item
-                    final Column selectedColumn = selectItem.getColumn();
-                    if (selectedColumn != null) {
-                        // we assume that if the table matches, we will use the
-                        // column
-                        if (selectedColumn.getTable() != null && selectedColumn.getTable().equals(table)) {
-                            selectItemsToMaterialize.add(selectItem.replaceFunction(null));
-                        }
-                    }
-                }
-            }
-
-            if (logger.isDebugEnabled()) {
-                logger.debug("calling materializeTable(" + table.getName() + "," + selectItemsToMaterialize + ",1,-1");
-            }
-
-            // Dispatching to the concrete subclass of
-            // QueryPostprocessDataContextStrategy
-            dataSet = materializeTable(table, selectItemsToMaterialize, 1, -1);
-
-        } else if (joinType != null) {
-            // We need to (recursively) materialize a joined FromItem
-            if (fromItem.getLeftSide() == null || fromItem.getRightSide() == null) {
-                throw new IllegalArgumentException("Joined FromItem requires both left and right side: " + fromItem);
-            }
-            DataSet[] fromItemDataSets = new DataSet[2];
-
-            // materialize left side
-            List<SelectItem> leftOn = Arrays.asList(fromItem.getLeftOn());
-            fromItemDataSets[0] = materializeFromItem(fromItem.getLeftSide(),
-                    CollectionUtils.concat(true, selectItems, leftOn));
-
-            // materialize right side
-            List<SelectItem> rightOn = Arrays.asList(fromItem.getRightOn());
-            fromItemDataSets[1] = materializeFromItem(fromItem.getRightSide(),
-                    CollectionUtils.concat(true, selectItems, rightOn));
-
-            FilterItem[] onConditions = new FilterItem[leftOn.size()];
-            for (int i = 0; i < onConditions.length; i++) {
-                FilterItem whereItem = new FilterItem(leftOn.get(i), OperatorType.EQUALS_TO, rightOn.get(i));
-                onConditions[i] = whereItem;
-            }
-            if (joinType == JoinType.INNER) {
-                dataSet = MetaModelHelper.getCarthesianProduct(fromItemDataSets, onConditions);
-            } else if (joinType == JoinType.LEFT) {
-                dataSet = MetaModelHelper.getLeftJoin(fromItemDataSets[0], fromItemDataSets[1], onConditions);
-            } else if (joinType == JoinType.RIGHT) {
-                dataSet = MetaModelHelper.getRightJoin(fromItemDataSets[0], fromItemDataSets[1], onConditions);
-            } else {
-                throw new IllegalArgumentException("FromItem type not supported: " + fromItem);
-            }
-        } else if (fromItem.getSubQuery() != null) {
-            // We need to (recursively) materialize a subquery
-            dataSet = executeQuery(fromItem.getSubQuery());
-        } else {
-            throw new IllegalArgumentException("FromItem type not supported: " + fromItem);
-        }
-        if (dataSet == null) {
-            throw new IllegalStateException("FromItem was not succesfully materialized: " + fromItem);
-        }
-        return dataSet;
-    }
-
-    protected DataSet materializeTable(final Table table, final List<SelectItem> selectItems, final int firstRow,
-            final int maxRows) {
-        if (table == null) {
-            throw new IllegalArgumentException("Table cannot be null");
-        }
-
-        if (selectItems == null || selectItems.isEmpty()) {
-            // add any column (typically this occurs because of COUNT(*)
-            // queries)
-            Column[] columns = table.getColumns();
-            if (columns.length == 0) {
-                logger.warn("Queried table has no columns: {}", table);
-            } else {
-                selectItems.add(new SelectItem(columns[0]));
-            }
-        }
-
-        if (maxRows == 0) {
-            return new EmptyDataSet(selectItems);
-        }
-
-        final Schema schema = table.getSchema();
-        final String schemaName;
-        if (schema == null) {
-            schemaName = null;
-        } else {
-            schemaName = schema.getName();
-        }
-
-        final DataSet dataSet;
-        if (INFORMATION_SCHEMA_NAME.equals(schemaName)) {
-            final DataSet informationDataSet = materializeInformationSchemaTable(table, selectItems, maxRows);
-            if (firstRow > 1) {
-                dataSet = new FirstRowDataSet(informationDataSet, firstRow);
-            } else {
-                dataSet = informationDataSet;
-            }
-        } else {
-            final DataSet tableDataSet = materializeMainSchemaTable(table, selectItems, firstRow, maxRows);
-
-            // conversion is done at materialization time, since it enables
-            // the refined types to be used also in eg. where clauses.
-            dataSet = new ConvertedDataSetInterceptor(_converters).intercept(tableDataSet);
-        }
-
-        return dataSet;
-    }
-
-    protected boolean isMainSchemaTable(Table table) {
-        Schema schema = table.getSchema();
-        if (INFORMATION_SCHEMA_NAME.equals(schema.getName())) {
-            return false;
-        } else {
-            return true;
-        }
-    }
-
-    @Override
-    protected final String[] getSchemaNamesInternal() throws MetaModelException {
-        final String[] schemaNames = new String[2];
-        schemaNames[0] = INFORMATION_SCHEMA_NAME;
-        schemaNames[1] = getMainSchemaName();
-        return schemaNames;
-    }
-
-    @Override
-    protected String getDefaultSchemaName() throws MetaModelException {
-        return getMainSchemaName();
-    }
-
-    @Override
-    protected final Schema getSchemaByNameInternal(final String name) throws MetaModelException {
-        final String mainSchemaName = getMainSchemaName();
-        if (name == null) {
-            if (mainSchemaName == null) {
-                return getMainSchema();
-            }
-            return null;
-        }
-
-        if (name.equalsIgnoreCase(mainSchemaName)) {
-            return getMainSchemaInternal();
-        } else if (name.equals(INFORMATION_SCHEMA_NAME)) {
-            return getInformationSchema();
-        }
-
-        logger.warn("Could not find matching schema of name '{}'. Main schema name is: '{}'. Returning null.", name,
-                mainSchemaName);
-        return null;
-    }
-
-    private Schema getInformationSchema() {
-        // Create schema
-        MutableSchema informationSchema = new MutableSchema(INFORMATION_SCHEMA_NAME);
-        MutableTable tablesTable = new MutableTable("tables", TableType.TABLE, informationSchema);
-        MutableTable columnsTable = new MutableTable("columns", TableType.TABLE, informationSchema);
-        MutableTable relationshipsTable = new MutableTable("relationships", TableType.TABLE, informationSchema);
-        informationSchema.addTable(tablesTable).addTable(columnsTable).addTable(relationshipsTable);
-
-        // Create "tables" table: name, type, num_columns, remarks
-        tablesTable.addColumn(new MutableColumn("name", ColumnType.VARCHAR, tablesTable, 0, false));
-        tablesTable.addColumn(new MutableColumn("type", ColumnType.VARCHAR, tablesTable, 1, true));
-        tablesTable.addColumn(new MutableColumn("num_columns", ColumnType.INTEGER, tablesTable, 2, true));
-        tablesTable.addColumn(new MutableColumn("remarks", ColumnType.VARCHAR, tablesTable, 3, true));
-
-        // Create "columns" table: name, type, native_type, size, nullable,
-        // indexed, table, remarks
-        columnsTable.addColumn(new MutableColumn("name", ColumnType.VARCHAR, columnsTable, 0, false));
-        columnsTable.addColumn(new MutableColumn("type", ColumnType.VARCHAR, columnsTable, 1, true));
-        columnsTable.addColumn(new MutableColumn("native_type", ColumnType.VARCHAR, columnsTable, 2, true));
-        columnsTable.addColumn(new MutableColumn("size", ColumnType.INTEGER, columnsTable, 3, true));
-        columnsTable.addColumn(new MutableColumn("nullable", ColumnType.BOOLEAN, columnsTable, 4, true));
-        columnsTable.addColumn(new MutableColumn("indexed", ColumnType.BOOLEAN, columnsTable, 5, true));
-        columnsTable.addColumn(new MutableColumn("table", ColumnType.VARCHAR, columnsTable, 6, false));
-        columnsTable.addColumn(new MutableColumn("remarks", ColumnType.VARCHAR, columnsTable, 7, true));
-
-        // Create "relationships" table: primary_table, primary_column,
-        // foreign_table, foreign_column
-        relationshipsTable.addColumn(new MutableColumn("primary_table", ColumnType.VARCHAR, relationshipsTable, 0,
-                false));
-        relationshipsTable.addColumn(new MutableColumn("primary_column", ColumnType.VARCHAR, relationshipsTable, 1,
-                false));
-        relationshipsTable.addColumn(new MutableColumn("foreign_table", ColumnType.VARCHAR, relationshipsTable, 2,
-                false));
-        relationshipsTable.addColumn(new MutableColumn("foreign_column", ColumnType.VARCHAR, relationshipsTable, 3,
-                false));
-
-        MutableRelationship.createRelationship(tablesTable.getColumnByName("name"),
-                columnsTable.getColumnByName("table"));
-        MutableRelationship.createRelationship(tablesTable.getColumnByName("name"),
-                relationshipsTable.getColumnByName("primary_table"));
-        MutableRelationship.createRelationship(tablesTable.getColumnByName("name"),
-                relationshipsTable.getColumnByName("foreign_table"));
-        MutableRelationship.createRelationship(columnsTable.getColumnByName("name"),
-                relationshipsTable.getColumnByName("primary_column"));
-        MutableRelationship.createRelationship(columnsTable.getColumnByName("name"),
-                relationshipsTable.getColumnByName("foreign_column"));
-
-        return informationSchema;
-    }
-
-    private DataSet materializeInformationSchemaTable(final Table table, final List<SelectItem> selectItems,
-            final int maxRows) {
-        final String tableName = table.getName();
-        final SelectItem[] columnSelectItems = MetaModelHelper.createSelectItems(table.getColumns());
-        final SimpleDataSetHeader header = new SimpleDataSetHeader(columnSelectItems);
-        final Table[] tables = getMainSchemaInternal().getTables();
-        final List<Row> data = new ArrayList<Row>();
-        if ("tables".equals(tableName)) {
-            // "tables" columns: name, type, num_columns, remarks
-            for (Table t : tables) {
-                String typeString = null;
-                if (t.getType() != null) {
-                    typeString = t.getType().toString();
-                }
-                data.add(new DefaultRow(header, new Object[] { t.getName(), typeString, t.getColumnCount(),
-                        t.getRemarks() }));
-            }
-        } else if ("columns".equals(tableName)) {
-            // "columns" columns: name, type, native_type, size, nullable,
-            // indexed, table, remarks
-            for (Table t : tables) {
-                for (Column c : t.getColumns()) {
-                    String typeString = null;
-                    if (t.getType() != null) {
-                        typeString = c.getType().toString();
-                    }
-                    data.add(new DefaultRow(header, new Object[] { c.getName(), typeString, c.getNativeType(),
-                            c.getColumnSize(), c.isNullable(), c.isIndexed(), t.getName(), c.getRemarks() }));
-                }
-            }
-        } else if ("relationships".equals(tableName)) {
-            // "relationships" columns: primary_table, primary_column,
-            // foreign_table, foreign_column
-            for (Relationship r : getMainSchemaInternal().getRelationships()) {
-                Column[] primaryColumns = r.getPrimaryColumns();
-                Column[] foreignColumns = r.getForeignColumns();
-                Table pTable = r.getPrimaryTable();
-                Table fTable = r.getForeignTable();
-                for (int i = 0; i < primaryColumns.length; i++) {
-                    Column pColumn = primaryColumns[i];
-                    Column fColumn = foreignColumns[i];
-                    data.add(new DefaultRow(header, new Object[] { pTable.getName(), pColumn.getName(),
-                            fTable.getName(), fColumn.getName() }));
-                }
-            }
-        } else {
-            throw new IllegalArgumentException("Cannot materialize non information_schema table: " + table);
-        }
-
-        DataSet dataSet;
-        if (data.isEmpty()) {
-            dataSet = new EmptyDataSet(selectItems);
-        } else {
-            dataSet = new InMemoryDataSet(header, data);
-        }
-
-        // Handle column subset
-        dataSet = MetaModelHelper.getSelection(selectItems, dataSet);
-
-        // Handle maxRows
-        if (maxRows != -1) {
-            dataSet = new MaxRowsDataSet(dataSet, maxRows);
-        }
-        return dataSet;
-    }
-
-    protected Schema getMainSchemaInternal() {
-        Schema schema = _mainSchema;
-        if (schema == null) {
-            schema = getMainSchema();
-            _mainSchema = schema;
-        }
-        return schema;
-    }
-
-    /**
-     * Adds a {@link TypeConverter} to this DataContext's query engine (Query
-     * Postprocessor) for read operations. Note that this method should NOT be
-     * invoked directly by consuming code. Rather use
-     * {@link Converters#addTypeConverter(DataContext, Column, TypeConverter)}
-     * to ensure conversion on both reads and writes.
-     */
-    @Override
-    public void addConverter(Column column, TypeConverter<?, ?> converter) {
-        _converters.put(column, converter);
-    }
-
-    /**
-     * @return the main schema that subclasses of this class produce
-     */
-    protected abstract Schema getMainSchema() throws MetaModelException;
-
-    /**
-     * @return the name of the main schema that subclasses of this class produce
-     */
-    protected abstract String getMainSchemaName() throws MetaModelException;
-
-    /**
-     * Executes a simple one-table query against a table in the main schema of
-     * the subclasses of this class. This default implementation will delegate
-     * to {@link #materializeMainSchemaTable(Table, Column[], int, int)}.
-     * 
-     * @param table
-     * @param selectItems
-     * @param firstRow
-     * @param maxRows
-     * @return
-     */
-    protected DataSet materializeMainSchemaTable(Table table, List<SelectItem> selectItems, int firstRow, int maxRows) {
-        Column[] columns = new Column[selectItems.size()];
-        for (int i = 0; i < columns.length; i++) {
-            columns[i] = selectItems.get(i).getColumn();
-        }
-        DataSet dataSet = materializeMainSchemaTable(table, columns, firstRow, maxRows);
-
-        dataSet = MetaModelHelper.getSelection(selectItems, dataSet);
-
-        return dataSet;
-    }
-
-    /**
-     * Executes a simple one-table query against a table in the main schema of
-     * the subclasses of this class. This default implementation will delegate
-     * to {@link #materializeMainSchemaTable(Table, Column[], int)} and apply a
-     * {@link FirstRowDataSet} if necessary.
-     * 
-     * @param table
-     * @param columns
-     * @param firstRow
-     * @param maxRows
-     * @return
-     */
-    protected DataSet materializeMainSchemaTable(Table table, Column[] columns, int firstRow, int maxRows) {
-        final int rowsToMaterialize;
-        if (firstRow == 1) {
-            rowsToMaterialize = maxRows;
-        } else {
-            rowsToMaterialize = maxRows + (firstRow - 1);
-        }
-        DataSet dataSet = materializeMainSchemaTable(table, columns, rowsToMaterialize);
-        if (firstRow > 1) {
-            dataSet = new FirstRowDataSet(dataSet, firstRow);
-        }
-        return dataSet;
-    }
-
-    /**
-     * Executes a simple one-table query against a table in the main schema of
-     * the subclasses of this class.
-     * 
-     * @param table
-     *            the table to query
-     * @param columns
-     *            the columns of the table to query
-     * @param maxRows
-     *            the maximum amount of rows needed or -1 if all rows are
-     *            wanted.
-     * @return a dataset with the raw table/column content.
-     */
-    protected abstract DataSet materializeMainSchemaTable(Table table, Column[] columns, int maxRows);
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDelegate.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDelegate.java b/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDelegate.java
deleted file mode 100644
index a69163d..0000000
--- a/core/src/main/java/org/eobjects/metamodel/QueryPostprocessDelegate.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import org.eobjects.metamodel.schema.Schema;
-
-/**
- * A simple subclass of {@link QueryPostprocessDataContext} which provides less
- * implementation fuzz when custom querying features (like composite
- * datacontexts or type conversion) is needed.
- * 
- * @author Kasper Sørensen
- * @author Ankit Kumar
- */
-public abstract class QueryPostprocessDelegate extends
-		QueryPostprocessDataContext {
-
-	@Override
-	protected String getMainSchemaName() throws MetaModelException {
-		throw new UnsupportedOperationException(
-				"QueryPostprocessDelegate cannot perform schema exploration");
-	}
-
-	@Override
-	protected Schema getMainSchema() throws MetaModelException {
-		throw new UnsupportedOperationException(
-				"QueryPostprocessDelegate cannot perform schema exploration");
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/SchemaNameComparator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/SchemaNameComparator.java b/core/src/main/java/org/eobjects/metamodel/SchemaNameComparator.java
deleted file mode 100644
index 14a2f82..0000000
--- a/core/src/main/java/org/eobjects/metamodel/SchemaNameComparator.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import java.util.Comparator;
-
-/**
- * Comparator for comparing schema names.
- * 
- * @author Kasper Sørensen
- */
-class SchemaNameComparator implements Comparator<String> {
-
-    private static Comparator<? super String> _instance = new SchemaNameComparator();
-
-    public static Comparator<? super String> getInstance() {
-        return _instance;
-    }
-
-    private SchemaNameComparator() {
-    }
-
-    public int compare(String o1, String o2) {
-        if (o1 == null && o2 == null) {
-            return 0;
-        }
-        if (o1 == null) {
-            return -1;
-        }
-        if (o2 == null) {
-            return 1;
-        }
-        if (MetaModelHelper.isInformationSchema(o1)) {
-            return -1;
-        }
-        if (MetaModelHelper.isInformationSchema(o2)) {
-            return 1;
-        }
-        return o1.compareTo(o2);
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/UpdateCallback.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/UpdateCallback.java b/core/src/main/java/org/eobjects/metamodel/UpdateCallback.java
deleted file mode 100644
index 3f23173..0000000
--- a/core/src/main/java/org/eobjects/metamodel/UpdateCallback.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import org.eobjects.metamodel.create.TableCreatable;
-import org.eobjects.metamodel.delete.RowDeletable;
-import org.eobjects.metamodel.drop.TableDroppable;
-import org.eobjects.metamodel.insert.RowInsertable;
-import org.eobjects.metamodel.update.RowUpdateable;
-
-/**
- * An {@link UpdateCallback} is used by an {@link UpdateScript} to perform
- * updates on a {@link DataContext}. Multiple updates (eg. insertion of several
- * rows or creation of multiple tables) can (and should) be performed with a
- * single {@link UpdateCallback}. This pattern guarantees that connections
- * and/or file handles are handled correctly, surrounding the
- * {@link UpdateScript} that is being executed.
- * 
- * @author Kasper Sørensen
- */
-public interface UpdateCallback extends TableCreatable, TableDroppable, RowInsertable, RowUpdateable, RowDeletable {
-
-    /**
-     * Gets the DataContext on which the update script is being executed.
-     * 
-     * @return the DataContext on which the update script is being executed.
-     */
-    public DataContext getDataContext();
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/UpdateScript.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/UpdateScript.java b/core/src/main/java/org/eobjects/metamodel/UpdateScript.java
deleted file mode 100644
index f8dae35..0000000
--- a/core/src/main/java/org/eobjects/metamodel/UpdateScript.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-import org.eobjects.metamodel.util.Action;
-
-/**
- * Represents any updating operation or update script that can be executed on a
- * {@link UpdateableDataContext}. Users of MetaModel should implement their own
- * {@link UpdateScript} and submit them to the
- * {@link UpdateableDataContext#executeUpdate(UpdateScript)} method for
- * execution.
- * 
- * @author Kasper Sørensen
- */
-public interface UpdateScript extends Action<UpdateCallback> {
-
-	/**
-	 * Invoked by MetaModel when the update script should be run. User should
-	 * implement this method and invoke update operations on the
-	 * {@link UpdateCallback}.
-	 */
-	@Override
-	public void run(UpdateCallback callback);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/UpdateableDataContext.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/UpdateableDataContext.java b/core/src/main/java/org/eobjects/metamodel/UpdateableDataContext.java
deleted file mode 100644
index 302a70a..0000000
--- a/core/src/main/java/org/eobjects/metamodel/UpdateableDataContext.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel;
-
-/**
- * Represents a {@link DataContext} that supports updating write-operations.
- * 
- * @author Kasper Sørensen
- */
-public interface UpdateableDataContext extends DataContext {
-
-	/**
-	 * Submits an {@link UpdateScript} for execution on the {@link DataContext}.
-	 * 
-	 * Since implementations of the {@link DataContext} vary quite a lot, there
-	 * is no golden rule as to how an update script will be executed. But the
-	 * implementors should strive towards handling an {@link UpdateScript} as a
-	 * single transactional change to the data store.
-	 * 
-	 * @param update
-	 *            the update script to execute
-	 */
-	public void executeUpdate(UpdateScript update);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/convert/ColumnTypeDetector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/convert/ColumnTypeDetector.java b/core/src/main/java/org/eobjects/metamodel/convert/ColumnTypeDetector.java
deleted file mode 100644
index 4b88c5a..0000000
--- a/core/src/main/java/org/eobjects/metamodel/convert/ColumnTypeDetector.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel.convert;
-
-import org.eobjects.metamodel.util.BooleanComparator;
-import org.eobjects.metamodel.util.TimeComparator;
-
-/**
- * A class capable of detecting/narrowing a string column type to something more
- * specific. Either: Boolean, Integer, Double or Date.
- */
-final class ColumnTypeDetector {
-
-	private boolean _booleanPossible = true;
-	private boolean _integerPossible = true;
-	private boolean _doublePossible = true;
-	private boolean _datePossible = true;
-
-	public void registerValue(String stringValue) {
-		if (stringValue == null || stringValue.length() == 0) {
-			return;
-		}
-		if (_booleanPossible) {
-			try {
-				BooleanComparator.parseBoolean(stringValue);
-			} catch (IllegalArgumentException e) {
-				_booleanPossible = false;
-			}
-		}
-		if (_doublePossible) {
-			try {
-				Double.parseDouble(stringValue);
-			} catch (NumberFormatException e) {
-				_doublePossible = false;
-				_integerPossible = false;
-			}
-			// If integer is possible, double will always also be possible,
-			// but not nescesarily the other way around
-			if (_integerPossible) {
-				try {
-					Integer.parseInt(stringValue);
-				} catch (NumberFormatException e) {
-					_integerPossible = false;
-				}
-			}
-		}
-		if (_datePossible) {
-			if (TimeComparator.toDate(stringValue) == null) {
-				_datePossible = false;
-			}
-		}
-	}
-
-	public TypeConverter<?, ?> createConverter() {
-		if (_booleanPossible) {
-			return new StringToBooleanConverter();
-		} else if (_integerPossible) {
-			return new StringToIntegerConverter();
-		} else if (_doublePossible) {
-			return new StringToDoubleConverter();
-		} else if (_datePossible) {
-			return new StringToDateConverter();
-		}
-		return null;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSet.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSet.java b/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSet.java
deleted file mode 100644
index 8e35be8..0000000
--- a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSet.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel.convert;
-
-import org.eobjects.metamodel.data.AbstractDataSet;
-import org.eobjects.metamodel.data.DataSet;
-import org.eobjects.metamodel.data.DefaultRow;
-import org.eobjects.metamodel.data.Row;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A {@link DataSet} wrapper/decorator which converts values using
- * {@link TypeConverter}s before returning them to the user.
- */
-final class ConvertedDataSet extends AbstractDataSet {
-
-    private static final Logger logger = LoggerFactory.getLogger(ConvertedDataSet.class);
-
-    private final DataSet _dataSet;
-    private final TypeConverter<?, ?>[] _converters;
-
-    public ConvertedDataSet(DataSet dataSet, TypeConverter<?, ?>[] converters) {
-        super(dataSet.getSelectItems());
-        _dataSet = dataSet;
-        _converters = converters;
-    }
-
-    @Override
-    public boolean next() {
-        return _dataSet.next();
-    }
-
-    @Override
-    public Row getRow() {
-        Row sourceRow = _dataSet.getRow();
-        Object[] values = new Object[_converters.length];
-        for (int i = 0; i < values.length; i++) {
-            Object value = sourceRow.getValue(i);
-
-            @SuppressWarnings("unchecked")
-            TypeConverter<Object, ?> converter = (TypeConverter<Object, ?>) _converters[i];
-
-            if (converter != null) {
-                Object virtualValue = converter.toVirtualValue(value);
-                logger.debug("Converted physical value {} to {}", value, virtualValue);
-                value = virtualValue;
-            }
-            values[i] = value;
-        }
-        return new DefaultRow(getHeader(), values);
-    }
-
-    @Override
-    public void close() {
-        _dataSet.close();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSetInterceptor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSetInterceptor.java b/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSetInterceptor.java
deleted file mode 100644
index 545ffe6..0000000
--- a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedDataSetInterceptor.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel.convert;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.eobjects.metamodel.data.DataSet;
-import org.eobjects.metamodel.intercept.DataSetInterceptor;
-import org.eobjects.metamodel.query.SelectItem;
-import org.eobjects.metamodel.schema.Column;
-
-/**
- * A {@link DataSetInterceptor} used for intercepting values in {@link DataSet}s
- * that need to be converted, according to a set of {@link TypeConverter}s.
- * 
- * @see TypeConverter
- * @see Converters
- */
-public class ConvertedDataSetInterceptor implements DataSetInterceptor, HasReadTypeConverters {
-
-	private Map<Column, TypeConverter<?, ?>> _converters;
-
-	public ConvertedDataSetInterceptor() {
-		this(new HashMap<Column, TypeConverter<?, ?>>());
-	}
-
-	public ConvertedDataSetInterceptor(
-			Map<Column, TypeConverter<?, ?>> converters) {
-		_converters = converters;
-	}
-
-	@Override
-	public void addConverter(Column column, TypeConverter<?, ?> converter) {
-		if (converter == null) {
-			_converters.remove(column);
-		} else {
-			_converters.put(column, converter);
-		}
-	}
-	
-	protected Map<Column, TypeConverter<?, ?>> getConverters(DataSet dataSet) {
-		return _converters;
-	}
-
-	@Override
-	public final DataSet intercept(DataSet dataSet) {
-		Map<Column, TypeConverter<?, ?>> converters = getConverters(dataSet);
-		if (converters.isEmpty()) {
-			return dataSet;
-		}
-
-		boolean hasConverter = false;
-		SelectItem[] selectItems = dataSet.getSelectItems();
-		TypeConverter<?, ?>[] converterArray = new TypeConverter[selectItems.length];
-		for (int i = 0; i < selectItems.length; i++) {
-			SelectItem selectItem = selectItems[i];
-			Column column = selectItem.getColumn();
-			if (column != null && selectItem.getFunction() == null) {
-				TypeConverter<?, ?> converter = converters.get(column);
-				if (converter != null) {
-					hasConverter = true;
-					converterArray[i] = converter;
-				}
-			}
-		}
-
-		if (!hasConverter) {
-			return dataSet;
-		}
-
-		return new ConvertedDataSet(dataSet, converterArray);
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowInsertionInterceptor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowInsertionInterceptor.java b/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowInsertionInterceptor.java
deleted file mode 100644
index 21c2506..0000000
--- a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowInsertionInterceptor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel.convert;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.eobjects.metamodel.insert.RowInsertionBuilder;
-import org.eobjects.metamodel.intercept.RowInsertionInterceptor;
-import org.eobjects.metamodel.schema.Column;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A {@link RowInsertionInterceptor} used for intercepting values in
- * {@link RowInsertionBuilder}s that need to be converted, according to a set of
- * {@link TypeConverter}s.
- * 
- * @see TypeConverter
- * @see Converters
- */
-public class ConvertedRowInsertionInterceptor implements RowInsertionInterceptor {
-
-    private static final Logger logger = LoggerFactory.getLogger(ConvertedRowInsertionInterceptor.class);
-
-    private final Map<Column, TypeConverter<?, ?>> _converters;
-
-    public ConvertedRowInsertionInterceptor() {
-        this(new HashMap<Column, TypeConverter<?, ?>>());
-    }
-
-    public ConvertedRowInsertionInterceptor(Map<Column, TypeConverter<?, ?>> converters) {
-        _converters = converters;
-    }
-
-    public void addConverter(Column column, TypeConverter<?, ?> converter) {
-        if (converter == null) {
-            _converters.remove(column);
-        } else {
-            _converters.put(column, converter);
-        }
-    }
-
-    @Override
-    public RowInsertionBuilder intercept(RowInsertionBuilder insert) {
-        if (_converters.isEmpty()) {
-            return insert;
-        }
-
-        logger.debug("Insert statement before conversion: {}", insert);
-
-        insert = Converters.convertRow(insert, _converters);
-
-        logger.debug("Insert statement after conversion:  {}", insert);
-
-        return insert;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metamodel/blob/e2e2b37a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowUpdationInterceptor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowUpdationInterceptor.java b/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowUpdationInterceptor.java
deleted file mode 100644
index 25c1756..0000000
--- a/core/src/main/java/org/eobjects/metamodel/convert/ConvertedRowUpdationInterceptor.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.eobjects.metamodel.convert;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.eobjects.metamodel.intercept.RowUpdationInterceptor;
-import org.eobjects.metamodel.schema.Column;
-import org.eobjects.metamodel.update.RowUpdationBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ConvertedRowUpdationInterceptor implements RowUpdationInterceptor {
-
-    private static final Logger logger = LoggerFactory.getLogger(ConvertedRowUpdationInterceptor.class);
-
-    private final Map<Column, TypeConverter<?, ?>> _converters;
-
-    public ConvertedRowUpdationInterceptor() {
-        this(new HashMap<Column, TypeConverter<?, ?>>());
-    }
-
-    public ConvertedRowUpdationInterceptor(Map<Column, TypeConverter<?, ?>> converters) {
-        _converters = converters;
-    }
-
-    public void addConverter(Column column, TypeConverter<?, ?> converter) {
-        if (converter == null) {
-            _converters.remove(column);
-        } else {
-            _converters.put(column, converter);
-        }
-    }
-
-    @Override
-    public RowUpdationBuilder intercept(RowUpdationBuilder update) {
-        if (_converters.isEmpty()) {
-            return update;
-        }
-
-        logger.debug("Update statement after conversion:  {}", update);
-
-        update = Converters.convertRow(update, _converters);
-        
-        logger.debug("Update statement after conversion:  {}", update);
-
-        return update;
-    }
-
-}