Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/22 17:49:32 UTC

[08/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
new file mode 100644
index 0000000..16ef588
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
@@ -0,0 +1,109 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+
+/**
+ * Joins the left-hand iteration with an {@link ExternalBatchingIterator} by draining the
+ * left side in batches of up to batchSize binding sets and evaluating each batch in one call.
+ */
+public class ExternalMultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
+    private final ParallelEvaluationStrategyImpl strategy;
+    private final CloseableIteration leftIter;
+    private ExternalBatchingIterator stmtPtrn;
+    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
+    //TODO: configurable
+    private int batchSize = 1000;
+
+    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, ExternalBatchingIterator stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        leftIter = strategy.evaluate(leftArg, bindings);
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, ExternalBatchingIterator stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.leftIter = leftIter;
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    protected void initIter() throws QueryEvaluationException {
+        try {
+            Collection<BindingSet> sets = new ArrayList<BindingSet>();
+            int i = 0;
+            while (leftIter.hasNext()) {
+                //stop once the batch reaches batchSize (defaults to 1000)
+                if (i >= batchSize) {
+                    break;
+                }
+                sets.add((BindingSet) leftIter.next());
+                i++;
+            }
+            if (iter != null) iter.close();
+            iter = stmtPtrn.evaluate(sets);
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected BindingSet getNextElement()
+            throws QueryEvaluationException {
+        try {
+            while (true) {
+                if (iter.hasNext()) {
+                    return iter.next();
+                }
+
+                if (leftIter.hasNext()) {
+                    initIter();
+                } else
+                    return null;
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected void handleClose()
+            throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+            iter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+}
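
For readers following the patch: initIter() above drains up to batchSize binding sets from the
left-hand iteration and passes them to the external batching iterator in a single evaluate(sets)
call. A stand-alone sketch of that drain step, for illustration only (BatchingSketch and
nextBatch are hypothetical names, and the real class works with CloseableIteration rather than
java.util.Iterator):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    final class BatchingSketch {
        private static final int BATCH_SIZE = 1000; // mirrors the hard-coded batchSize field above

        // Pull at most BATCH_SIZE elements off the source; repeated calls walk the whole iterator.
        static <T> List<T> nextBatch(Iterator<T> source) {
            List<T> batch = new ArrayList<T>();
            while (source.hasNext() && batch.size() < BATCH_SIZE) {
                batch.add(source.next());
            }
            return batch;
        }
    }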

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
new file mode 100644
index 0000000..24e2527
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
@@ -0,0 +1,97 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.domain.RangeURI;
+import mvm.rya.api.domain.RangeValue;
+import org.openrdf.model.Value;
+import org.openrdf.model.impl.BooleanLiteralImpl;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static mvm.rya.api.RdfCloudTripleStoreConstants.RANGE;
+
+/**
+ * Class FilterRangeVisitor
+ * Rewrites mvm:range(?var, start, end) filter calls into range values on the matching
+ * statement pattern variables.
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class FilterRangeVisitor extends QueryModelVisitorBase {
+
+    private RdfCloudTripleStoreConfiguration conf;
+    private Map<Var, RangeValue> rangeValues = new HashMap<Var, RangeValue>();
+
+    public FilterRangeVisitor(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    @Override
+    public void meet(Filter node) throws Exception {
+        super.meet(node);
+
+        ValueExpr arg = node.getCondition();
+        if (arg instanceof FunctionCall) {
+            FunctionCall fc = (FunctionCall) arg;
+            if (RANGE.stringValue().equals(fc.getURI())) {
+                //range(?var, start, end)
+                List<ValueExpr> valueExprs = fc.getArgs();
+                if (valueExprs.size() != 3) {
+                    throw new QueryEvaluationException("mvm:range must have 3 parameters: variable, start, end");
+                }
+                Var var = (Var) valueExprs.get(0);
+                ValueConstant startVc = (ValueConstant) valueExprs.get(1);
+                ValueConstant endVc = (ValueConstant) valueExprs.get(2);
+                Value start = startVc.getValue();
+                Value end = endVc.getValue();
+                rangeValues.put(var, new RangeValue(start, end));
+                node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE));
+            }
+        }
+    }
+
+    @Override
+    public void meet(StatementPattern node) throws Exception {
+        super.meet(node);
+
+        Var subjectVar = node.getSubjectVar();
+        RangeValue subjRange = rangeValues.get(subjectVar);
+        Var predVar = node.getPredicateVar();
+        RangeValue predRange = rangeValues.get(predVar);
+        Var objVar = node.getObjectVar();
+        RangeValue objRange = rangeValues.get(objVar);
+        if(subjRange != null) {
+            subjectVar.setValue(new RangeURI(subjRange));//Assumes no blank nodes can be ranges
+        }
+        if(predRange != null) {
+            predVar.setValue(new RangeURI(predRange));
+        }
+        if(objRange != null) {
+            objVar.setValue(objRange);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
new file mode 100644
index 0000000..01f3d27
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
@@ -0,0 +1,108 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * Joins the left-hand iteration with a single StatementPattern by draining the left side in
+ * batches of up to batchSize binding sets and evaluating the pattern once per batch.
+ */
+public class MultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
+    private final ParallelEvaluationStrategyImpl strategy;
+    private final CloseableIteration leftIter;
+    private StatementPattern stmtPtrn;
+    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
+    //TODO: configurable
+    private int batchSize = 1000;
+
+    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, StatementPattern stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        leftIter = strategy.evaluate(leftArg, bindings);
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, StatementPattern stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.leftIter = leftIter;
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    protected void initIter() throws QueryEvaluationException {
+        try {
+            Collection<BindingSet> sets = new ArrayList<BindingSet>();
+            int i = 0;
+            while (leftIter.hasNext()) {
+                //stop once the batch reaches batchSize (defaults to 1000)
+                if (i >= batchSize) {
+                    break;
+                }
+                sets.add((BindingSet) leftIter.next());
+                i++;
+            }
+            if (iter != null) iter.close();
+            iter = strategy.evaluate(stmtPtrn, sets);
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected BindingSet getNextElement()
+            throws QueryEvaluationException {
+        try {
+            while (true) {
+                if (iter.hasNext()) {
+                    return iter.next();
+                }
+
+                if (leftIter.hasNext()) {
+                    initIter();
+                } else
+                    return null;
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected void handleClose()
+            throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+            iter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
new file mode 100644
index 0000000..30dc966
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
@@ -0,0 +1,281 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.ConvertingIteration;
+import info.aduna.iteration.EmptyIteration;
+import info.aduna.iteration.Iteration;
+import info.aduna.iteration.IteratorIteration;
+import info.aduna.iteration.LimitIteration;
+import info.aduna.iteration.OffsetIteration;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreUtils;
+import mvm.rya.api.utils.NullableStatementImpl;
+import mvm.rya.rdftriplestore.RdfCloudTripleStoreConnection;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.inference.InferenceEngineException;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+
+import org.apache.log4j.Logger;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.QueryRoot;
+import org.openrdf.query.algebra.Slice;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.ValueExpr;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.evaluation.QueryBindingSet;
+import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
+import org.openrdf.query.algebra.evaluation.iterator.FilterIterator;
+import org.openrdf.query.algebra.evaluation.iterator.JoinIterator;
+import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Evaluation strategy that, when the "performant" flag is enabled, evaluates joins by batching
+ * left-hand binding sets against right-hand statement patterns instead of evaluating the right
+ * side once per binding.
+ */
+public class ParallelEvaluationStrategyImpl extends EvaluationStrategyImpl {
+    private static Logger logger = Logger.getLogger(ParallelEvaluationStrategyImpl.class);
+    
+    private int numOfThreads = 10;
+    private boolean performant = true;
+    private boolean displayQueryPlan = false;
+    private ExecutorService executorService;
+    private InferenceEngine inferenceEngine;
+
+    public ParallelEvaluationStrategyImpl(RdfCloudTripleStoreConnection.StoreTripleSource tripleSource, InferenceEngine inferenceEngine,
+                                          Dataset dataset, RdfCloudTripleStoreConfiguration conf) {
+        super(tripleSource, dataset);
+        Integer nthreads = conf.getNumThreads();
+        this.numOfThreads = (nthreads != null) ? nthreads : this.numOfThreads;
+        Boolean val = conf.isPerformant();
+        this.performant = (val != null) ? val : this.performant;
+        val = conf.isDisplayQueryPlan();
+        this.displayQueryPlan = (val != null) ? val : this.displayQueryPlan;
+        this.executorService = Executors.newFixedThreadPool(this.numOfThreads);
+        this.inferenceEngine = inferenceEngine;
+    }
+
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Join join, BindingSet bindings) throws QueryEvaluationException {
+        if (performant) {
+            TupleExpr buffer = join.getLeftArg();
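+            // Right-hand side is a single statement pattern: evaluate it against batches of left-side binding sets.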
+            if (join.getRightArg() instanceof StatementPattern) {
+                TupleExpr stmtPat = join.getRightArg();
+//                if(buffer instanceof StatementPattern && !(stmtPat instanceof StatementPattern)){
+//                    buffer = stmtPat;
+//                    stmtPat = join.getLeftArg();
+//                }
+
+                return new MultipleBindingSetsIterator(this, buffer, (StatementPattern) stmtPat, bindings);
+            } else if (join.getRightArg() instanceof ExternalBatchingIterator) {
+                    TupleExpr stmtPat = join.getRightArg();
+
+                    return new ExternalMultipleBindingSetsIterator(this, buffer, (ExternalBatchingIterator) stmtPat, bindings);
+            } else if (join.getRightArg() instanceof Filter) {
+                //apply the same batching optimization when the right-hand side is a Filter
+                Filter filter = (Filter) join.getRightArg();
+                TupleExpr filterChild = filter.getArg();
+                if (filterChild instanceof StatementPattern) {
+                    return new FilterIterator(filter, new MultipleBindingSetsIterator(this, buffer, (StatementPattern) filterChild, bindings), this);
+                } else if (filterChild instanceof Join) {
+                    Join filterChildJoin = (Join) filterChild;
+                    TupleExpr fcj_left = filterChildJoin.getLeftArg();
+                    TupleExpr fcj_right = filterChildJoin.getRightArg();
+                    //TODO: Should be a better way, maybe reorder the filter?
+                    //very particular case filter(join(stmtPat, stmtPat))
+                    if (fcj_left instanceof StatementPattern && fcj_right instanceof StatementPattern) {
+                        return new FilterIterator(filter, new MultipleBindingSetsIterator(this, new Join(buffer, fcj_left), (StatementPattern) fcj_right, bindings), this);
+                    }
+                }
+                //TODO: add a configuration flag for ParallelJoinIterator
+                return new JoinIterator(this, join, bindings);
+            } else {
+                //TODO: add a configuration flag for ParallelJoinIterator
+                return new JoinIterator(this, join, bindings);
+            }
+        } else {
+            return super.evaluate(join, bindings);
+        }
+    }
+
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(StatementPattern sp, BindingSet bindings) throws QueryEvaluationException {
+        //TODO: Wonder if creating a Collection here hurts performance
+        Set<BindingSet> bs = Collections.singleton(bindings);
+        return this.evaluate(sp, bs);
+    }
+
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(final StatementPattern sp, Collection<BindingSet> bindings)
+            throws QueryEvaluationException {
+
+        final Var subjVar = sp.getSubjectVar();
+        final Var predVar = sp.getPredicateVar();
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+
+        List<Map.Entry<Statement, BindingSet>> stmts = new ArrayList<Map.Entry<Statement, BindingSet>>();
+
+        Iteration<? extends Map.Entry<Statement, BindingSet>, QueryEvaluationException> iter;
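+        // A FixedStatementPattern carries its statements in memory, so match them against each binding without querying the store.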
+        if (sp instanceof FixedStatementPattern) {
+            Collection<Map.Entry<Statement, BindingSet>> coll = Lists.newArrayList();
+            for (BindingSet binding : bindings) {
+                Value subjValue = getVarValue(subjVar, binding);
+                Value predValue = getVarValue(predVar, binding);
+                Value objValue = getVarValue(objVar, binding);
+                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
+                for (Statement st : ((FixedStatementPattern) sp).statements) {
+                    if (!((subjValue != null && !subjValue.equals(st.getSubject())) ||
+                            (predValue != null && !predValue.equals(st.getPredicate())) ||
+                            (objValue != null && !objValue.equals(st.getObject())))) {
+                        coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
+                    }
+                }
+            }
+            iter = new IteratorIteration(coll.iterator());
+        } else if (sp instanceof TransitivePropertySP &&
+                ((subjVar != null && subjVar.getValue() != null) ||
+                        (objVar != null && objVar.getValue() != null)) &&
+                sp.getPredicateVar() != null) {
+            //if this is a transitive property pattern, either the subject or the object must be bound
+            //TODO: Cannot handle an open-ended transitive property where both subj and obj are null
+            //TODO: Should one day handle filling in the subj or obj from the bindings and expanding from there
+            //TODO: a lot of assumptions here, and a large result set could cause an OutOfMemoryError
+            Set<Statement> sts = null;
+            try {
+                sts = inferenceEngine.findTransitiveProperty((Resource) getVarValue(subjVar),
+                        (URI) getVarValue(predVar), getVarValue(objVar), (Resource) getVarValue(cntxtVar));
+            } catch (InferenceEngineException e) {
+                throw new QueryEvaluationException(e);
+            }
+            Collection<Map.Entry<Statement, BindingSet>> coll = new ArrayList();
+            for (BindingSet binding : bindings) {
+                for (Statement st : sts) {
+                    coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
+                }
+            }
+            iter = new IteratorIteration(coll.iterator());
+        } else {
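+            // General case: build (possibly wildcard) statements from the bindings and look them up via the triple source.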
+            for (BindingSet binding : bindings) {
+                Value subjValue = getVarValue(subjVar, binding);
+                Value predValue = getVarValue(predVar, binding);
+                Value objValue = getVarValue(objVar, binding);
+                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
+                if ((subjValue != null && !(subjValue instanceof Resource)) ||
+                        (predValue != null && !(predValue instanceof URI))) {
+                    continue;
+                }
+                stmts.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(
+                        new NullableStatementImpl((Resource) subjValue, (URI) predValue, objValue, contxtValue), binding));
+            }
+            if (stmts.size() == 0) {
+                return new EmptyIteration();
+            }
+
+            iter = ((RdfCloudTripleStoreConnection.StoreTripleSource) tripleSource).getStatements(stmts);
+        }
+        return new ConvertingIteration<Map.Entry<Statement, BindingSet>, BindingSet, QueryEvaluationException>(iter) {
+
+            @Override
+            protected BindingSet convert(Map.Entry<Statement, BindingSet> stbs) throws QueryEvaluationException {
+                Statement st = stbs.getKey();
+                BindingSet bs = stbs.getValue();
+                QueryBindingSet result = new QueryBindingSet(bs);
+                if (subjVar != null && !result.hasBinding(subjVar.getName())) {
+                    result.addBinding(subjVar.getName(), st.getSubject());
+                }
+                if (predVar != null && !result.hasBinding(predVar.getName())) {
+                    result.addBinding(predVar.getName(), st.getPredicate());
+                }
+                if (objVar != null && !result.hasBinding(objVar.getName())) {
+                    result.addBinding(objVar.getName(), st.getObject());
+                }
+                if (cntxtVar != null && !result.hasBinding(cntxtVar.getName()) && st.getContext() != null) {
+                    result.addBinding(cntxtVar.getName(), st.getContext());
+                }
+                return result;
+            }
+        };
+    }
+
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(TupleExpr expr, BindingSet bindings) throws QueryEvaluationException {
+        if (expr instanceof QueryRoot) {
+            if (displayQueryPlan) {
+//                System.out.println("Tables: ");
+//                System.out.println("--SPO: \t" + RdfCloudTripleStoreConstants.TBL_SPO);
+//                System.out.println("--PO: \t" + RdfCloudTripleStoreConstants.TBL_PO);
+//                System.out.println("--OSP: \t" + RdfCloudTripleStoreConstants.TBL_OSP);
+                logger.info("=================== Rya Query ===================");
+                for (String str : expr.toString().split("\\r?\\n")) {
+                    logger.info(str);
+                }
+                logger.info("================= End Rya Query =================");
+            }
+        }
+        return super.evaluate(expr, bindings);
+    }
+
+    public CloseableIteration evaluate(Slice slice, BindingSet bindings)
+            throws QueryEvaluationException {
+        CloseableIteration result = evaluate(slice.getArg(), bindings);
+        if (slice.hasOffset()) {
+            result = new OffsetIteration(result, slice.getOffset());
+        }
+        if (slice.hasLimit()) {
+            result = new LimitIteration(result, slice.getLimit());
+        }
+        return result;
+    }
+
+    protected Value getVarValue(Var var) {
+        if (var == null)
+            return null;
+        else
+            return var.getValue();
+    }
+
+    public void shutdown() {
+        executorService.shutdownNow();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
new file mode 100644
index 0000000..1d5c982
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
@@ -0,0 +1,139 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+
+import java.util.NoSuchElementException;
+import java.util.Queue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.EvaluationStrategy;
+import org.openrdf.query.impl.EmptyBindingSet;
+
+/**
+ * Join iterator that submits evaluation of the right-hand side of the join to an ExecutorService
+ * for batches of left-hand binding sets, collecting results from per-work queues.
+ */
+public class ParallelJoinIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
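+    // End-of-work sentinel: each worker enqueues it once its right-side iteration is exhausted.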
+    public static final EmptyBindingSet EMPTY_BINDING_SET = new EmptyBindingSet();
+
+    private final EvaluationStrategy strategy;
+    private final Join join;
+    private final CloseableIteration<BindingSet, QueryEvaluationException> leftIter;
+
+    private ExecutorService executorService;
+    private Queue<ParallelIteratorWork> workQueue = new LinkedBlockingQueue<ParallelIteratorWork>();
+    private ParallelIteratorWork currentWork;
+    private int batch;
+
+    public ParallelJoinIterator(EvaluationStrategy strategy, Join join, BindingSet bindings, ExecutorService executorService, int batch)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.join = join;
+        leftIter = strategy.evaluate(join.getLeftArg(), bindings);
+
+        this.executorService = executorService;
+        this.batch = batch;
+    }
+
+
+    @Override
+    protected BindingSet getNextElement() throws QueryEvaluationException {
+
+        try {
+            while (leftIter.hasNext() || !workQueue.isEmpty() || currentWork != null) {
+                if (!workQueue.isEmpty() && currentWork == null) {
+                    currentWork = workQueue.poll();
+                }
+
+                if (currentWork != null) {
+                    BindingSet bindingSet = currentWork.queue.poll();
+                    if (EMPTY_BINDING_SET.equals(bindingSet)) {
+                        currentWork = null;
+                        continue;
+                    } else if (bindingSet == null) {
+                        continue;
+                    }
+                    return bindingSet;
+                }
+
+                try {
+                    for (int i = 0; i < batch; i++) {
+                        if (leftIter.hasNext()) {
+                            ParallelIteratorWork work = new ParallelIteratorWork((BindingSet) leftIter.next(), join.getRightArg());
+                            workQueue.add(work);
+                            executorService.execute(work);
+                        } else
+                            break;
+                    }
+                } catch (NoSuchElementException ignore) {
+                }
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+        return null;
+    }
+
+    @Override
+    protected void handleClose() throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+//           rightIter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    private class ParallelIteratorWork implements Runnable {
+
+        private BindingSet leftBindingSet;
+        private TupleExpr rightTupleExpr;
+        public LinkedBlockingQueue<BindingSet> queue = new LinkedBlockingQueue<BindingSet>();
+
+        private ParallelIteratorWork(BindingSet leftBindingSet, TupleExpr rightTupleExpr) {
+            this.leftBindingSet = leftBindingSet;
+            this.rightTupleExpr = rightTupleExpr;
+        }
+
+        @Override
+        public void run() {
+            try {
+                CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(rightTupleExpr, leftBindingSet);
+                while (iter.hasNext()) {
+                    queue.add(iter.next());
+                }
+                queue.add(EMPTY_BINDING_SET);
+                iter.close();
+            } catch (QueryEvaluationException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+}
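
The worker pattern above, in which each ParallelIteratorWork fills a LinkedBlockingQueue and then
enqueues EMPTY_BINDING_SET as an end-of-work marker, can be sketched independently of the Sesame
types. Illustration only; SentinelQueueSketch and END_OF_WORK are hypothetical names, and the real
iterator polls several work queues without blocking rather than taking from a single one:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;

    final class SentinelQueueSketch {
        private static final String END_OF_WORK = ""; // plays the role of EMPTY_BINDING_SET above

        public static void main(String[] args) throws InterruptedException {
            final LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<String>();
            ExecutorService pool = Executors.newFixedThreadPool(1);
            pool.execute(new Runnable() {
                public void run() {
                    queue.add("result-1");   // a worker pushes its results...
                    queue.add("result-2");
                    queue.add(END_OF_WORK);  // ...then marks its unit of work as finished
                }
            });
            String item;
            while (!END_OF_WORK.equals(item = queue.take())) { // consumer drains until the sentinel
                System.out.println(item);
            }
            pool.shutdown();
        }
    }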

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
new file mode 100644
index 0000000..342f98d
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
@@ -0,0 +1,57 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * Class PushJoinDownVisitor
+ * Flattens join(join(a, b), join(c, d)) trees into left-deep joins.
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class PushJoinDownVisitor extends QueryModelVisitorBase {
+    @Override
+    public void meet(Join node) throws Exception {
+        super.meet(node);
+
+        TupleExpr leftArg = node.getLeftArg();
+        TupleExpr rightArg = node.getRightArg();
+
+        /**
+         * Rewrite join(join(1, 2), join(3, 4)) as
+         * join(join(join(1, 2), 3), 4) so the join tree is left-deep.
+         */
+        if (leftArg instanceof Join && rightArg instanceof Join) {
+            Join leftJoin = (Join) leftArg;
+            Join rightJoin = (Join) rightArg;
+            TupleExpr right_LeftArg = rightJoin.getLeftArg();
+            TupleExpr right_rightArg = rightJoin.getRightArg();
+            Join inner = new Join(leftJoin, right_LeftArg);
+            Join outer = new Join(inner, right_rightArg);
+            node.replaceWith(outer);
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
new file mode 100644
index 0000000..940e46e
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
@@ -0,0 +1,284 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.evaluation.QueryOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+import org.openrdf.query.algebra.helpers.StatementPatternCollector;
+
+import java.util.*;
+
+/**
+ * A query optimizer that re-orders nested Joins.
+ *
+ * @author Arjohn Kampman
+ * @author James Leigh
+ */
+public class QueryJoinOptimizer implements QueryOptimizer {
+
+    protected final EvaluationStatistics statistics;
+
+    public QueryJoinOptimizer() {
+        this(new EvaluationStatistics());
+    }
+
+    public QueryJoinOptimizer(EvaluationStatistics statistics) {
+        this.statistics = statistics;
+    }
+
+    /**
+     * Applies generally applicable optimizations: path expressions are sorted
+     * from more to less specific.
+     *
+     * @param tupleExpr
+     */
+    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
+        tupleExpr.visit(new JoinVisitor());
+    }
+
+    protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
+
+        Set<String> boundVars = new HashSet<String>();
+
+        @Override
+        public void meet(LeftJoin leftJoin) {
+            leftJoin.getLeftArg().visit(this);
+
+            Set<String> origBoundVars = boundVars;
+            try {
+                boundVars = new HashSet<String>(boundVars);
+                boundVars.addAll(leftJoin.getLeftArg().getBindingNames());
+
+                leftJoin.getRightArg().visit(this);
+            } finally {
+                boundVars = origBoundVars;
+            }
+        }
+
+        @Override
+        public void meet(Join node) {
+            Set<String> origBoundVars = boundVars;
+            try {
+                boundVars = new HashSet<String>(boundVars);
+
+                // Recursively get the join arguments
+                List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
+
+                // Build maps of cardinalities and vars per tuple expression
+                Map<TupleExpr, Double> cardinalityMap = new HashMap<TupleExpr, Double>();
+//                Map<TupleExpr, List<Var>> varsMap = new HashMap<TupleExpr, List<Var>>();
+//                Map<Var, Double> varCardinalityMap = new HashMap<Var, Double>();
+
+                for (TupleExpr tupleExpr : joinArgs) {
+                    double cardinality = statistics.getCardinality(tupleExpr);
+//                    List<Var> statementPatternVars = getStatementPatternVars(tupleExpr);
+
+                    cardinalityMap.put(tupleExpr, cardinality);
+//                    varsMap.put(tupleExpr, statementPatternVars);
+                }
+
+                // Build map of var frequences
+//                Map<Var, Integer> varFreqMap = new HashMap<Var, Integer>();
+//                for (List<Var> varList : varsMap.values()) {
+//                    getVarFreqMap(varList, varFreqMap);
+//                }
+
+                // Reorder the (recursive) join arguments to a more optimal sequence
+                List<TupleExpr> orderedJoinArgs = new ArrayList<TupleExpr>(joinArgs.size());
+                while (!joinArgs.isEmpty()) {
+                    TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap
+                    );
+                    if (tupleExpr == null) {
+                        break;
+                    }
+
+                    joinArgs.remove(tupleExpr);
+                    orderedJoinArgs.add(tupleExpr);
+
+                    // Recursively optimize join arguments
+                    tupleExpr.visit(this);
+
+                    boundVars.addAll(tupleExpr.getBindingNames());
+                }
+
+                // Build new join hierarchy
+                // Note: generated hierarchy is right-recursive to help the
+                // IterativeEvaluationOptimizer to factor out the left-most join
+                // argument
+                int i = 0;
+                TupleExpr replacement = orderedJoinArgs.get(i);
+                for (i++; i < orderedJoinArgs.size(); i++) {
+                    replacement = new Join(replacement, orderedJoinArgs.get(i));
+                }
+
+                // Replace old join hierarchy
+                node.replaceWith(replacement);
+            } finally {
+                boundVars = origBoundVars;
+            }
+        }
+
+        protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
+            if (tupleExpr instanceof Join) {
+                Join join = (Join) tupleExpr;
+                getJoinArgs(join.getLeftArg(), joinArgs);
+                getJoinArgs(join.getRightArg(), joinArgs);
+            } else {
+                joinArgs.add(tupleExpr);
+            }
+
+            return joinArgs;
+        }
+
+        protected List<Var> getStatementPatternVars(TupleExpr tupleExpr) {
+            List<StatementPattern> stPatterns = StatementPatternCollector.process(tupleExpr);
+            List<Var> varList = new ArrayList<Var>(stPatterns.size() * 4);
+            for (StatementPattern sp : stPatterns) {
+                sp.getVars(varList);
+            }
+            return varList;
+        }
+
+        protected <M extends Map<Var, Integer>> M getVarFreqMap(List<Var> varList, M varFreqMap) {
+            for (Var var : varList) {
+                Integer freq = varFreqMap.get(var);
+                freq = (freq == null) ? 1 : freq + 1;
+                varFreqMap.put(var, freq);
+            }
+            return varFreqMap;
+        }
+
+        /**
+         * Selects from a list of tuple expressions the next tuple expression that
+         * should be evaluated. This implementation picks the tuple expression with
+         * the lowest estimated cardinality from the supplied cardinality map.
+         */
+        protected TupleExpr selectNextTupleExpr(List<TupleExpr> expressions,
+                                                Map<TupleExpr, Double> cardinalityMap
+//                                                ,Map<TupleExpr, List<Var>> varsMap,
+//                                                Map<Var, Integer> varFreqMap, Set<String> boundVars
+        ) {
+            double lowestCardinality = Double.MAX_VALUE;
+            TupleExpr result = expressions.get(0);
+
+            for (TupleExpr tupleExpr : expressions) {
+                // Calculate a score for this tuple expression
+//                double cardinality = getTupleExprCardinality(tupleExpr, cardinalityMap, varsMap, varFreqMap, boundVars);
+                double cardinality = cardinalityMap.get(tupleExpr);
+//                List<Var> vars = varsMap.get(tupleExpr);
+//                List<Var> distinctUnboundVars = getUnboundVars(vars);
+//                if (distinctUnboundVars.size() >= 2) {
+//                    cardinality *= (distinctUnboundVars.size() + 1);
+//                }
+
+                if (cardinality < lowestCardinality) {
+                    // More specific path expression found
+                    lowestCardinality = cardinality;
+                    result = tupleExpr;
+                }
+            }
+
+            return result;
+        }
+
+        protected double getTupleExprCardinality(TupleExpr tupleExpr, Map<TupleExpr, Double> cardinalityMap,
+                                                 Map<TupleExpr, List<Var>> varsMap, Map<Var, Integer> varFreqMap, Set<String> boundVars) {
+            double cardinality = cardinalityMap.get(tupleExpr);
+
+            List<Var> vars = varsMap.get(tupleExpr);
+
+            // Compensate for variables that are bound earlier in the evaluation
+            List<Var> unboundVars = getUnboundVars(vars);
+            List<Var> constantVars = getConstantVars(vars);
+            int nonConstantVarCount = vars.size() - constantVars.size();
+            if (nonConstantVarCount > 0) {
+                double exp = (double) unboundVars.size() / nonConstantVarCount;
+                cardinality = Math.pow(cardinality, exp);
+            }
+
+            if (unboundVars.isEmpty()) {
+                // Prefer patterns with more bound vars
+                if (nonConstantVarCount > 0) {
+                    cardinality /= nonConstantVarCount;
+                }
+            } else {
+                // Prefer patterns that bind variables from other tuple expressions
+                int foreignVarFreq = getForeignVarFreq(unboundVars, varFreqMap);
+                if (foreignVarFreq > 0) {
+                    cardinality /= foreignVarFreq;
+                }
+            }
+
+            // Prefer patterns that bind more variables
+            List<Var> distinctUnboundVars = getUnboundVars(new
+                    HashSet<Var>(vars));
+            if (distinctUnboundVars.size() >= 2) {
+                cardinality /= distinctUnboundVars.size();
+            }
+
+            return cardinality;
+        }
+
+        protected List<Var> getConstantVars(Iterable<Var> vars) {
+            List<Var> constantVars = new ArrayList<Var>();
+
+            for (Var var : vars) {
+                if (var.hasValue()) {
+                    constantVars.add(var);
+                }
+            }
+
+            return constantVars;
+        }
+
+        protected List<Var> getUnboundVars(Iterable<Var> vars) {
+            List<Var> unboundVars = new ArrayList<Var>();
+
+            for (Var var : vars) {
+                if (!var.hasValue() && !this.boundVars.contains(var.getName())) {
+                    unboundVars.add(var);
+                }
+            }
+
+            return unboundVars;
+        }
+
+        protected int getForeignVarFreq(List<Var> ownUnboundVars, Map<Var, Integer> varFreqMap) {
+            int result = 0;
+
+            Map<Var, Integer> ownFreqMap = getVarFreqMap(ownUnboundVars, new HashMap<Var, Integer>());
+
+            for (Map.Entry<Var, Integer> entry : ownFreqMap.entrySet()) {
+                Var var = entry.getKey();
+                int ownFreq = entry.getValue();
+                result += varFreqMap.get(var) - ownFreq;
+            }
+
+            return result;
+        }
+    }
+}
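
The reordering in meet(Join) above is greedy: selectNextTupleExpr repeatedly picks the remaining
join argument with the lowest estimated cardinality, and the chosen arguments are folded back into
a new join tree. A stand-alone sketch of that greedy selection over plain strings, for illustration
only (GreedyOrderSketch and orderByCardinality are hypothetical names):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    final class GreedyOrderSketch {
        // Orders the arguments from lowest to highest estimated cardinality.
        // Assumes every argument has an entry in the cardinality map.
        static List<String> orderByCardinality(List<String> args, Map<String, Double> cardinality) {
            List<String> remaining = new ArrayList<String>(args);
            List<String> ordered = new ArrayList<String>(args.size());
            while (!remaining.isEmpty()) {
                String best = remaining.get(0);
                for (String candidate : remaining) {
                    if (cardinality.get(candidate) < cardinality.get(best)) {
                        best = candidate; // lower estimated cardinality wins
                    }
                }
                remaining.remove(best);
                ordered.add(best);
            }
            return ordered;
        }
    }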

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
new file mode 100644
index 0000000..643446a
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
@@ -0,0 +1,260 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.QueryOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+public class QueryJoinSelectOptimizer implements QueryOptimizer {
+
+  private final EvaluationStatistics statistics;
+  private final SelectivityEvalDAO eval;
+  private final RdfCloudTripleStoreConfiguration config;
+
+  public QueryJoinSelectOptimizer(EvaluationStatistics statistics, SelectivityEvalDAO eval) {
+    System.out.println("Entering join optimizer!");
+    this.statistics = statistics;
+    this.eval = eval;
+    this.config = eval.getConf();
+  }
+
+  /**
+   * Reorders join arguments according to estimated cardinality and join selectivity, so that
+   * cheaper, more selective joins are evaluated first.
+   *
+   * @param tupleExpr
+   */
+  public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
+    tupleExpr.visit(new JoinVisitor());
+  }
+
+  protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
+
+    @Override
+    public void meet(Join node) {
+
+      try {
+        if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
+          return;
+        }
+
+        TupleExpr partialQuery = null;
+        List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
+        Map<TupleExpr,Double> cardinalityMap = new HashMap<TupleExpr,Double>();
+
+        for (TupleExpr tupleExpr : joinArgs) {
+          double cardinality = statistics.getCardinality(tupleExpr);
+          cardinalityMap.put(tupleExpr, cardinality);
+
+        }
+
+        while (!joinArgs.isEmpty()) {
+          TePairCost tpc = getBestTupleJoin(partialQuery, joinArgs);
+          List<TupleExpr> tePair = tpc.getTePair();
+          if (partialQuery == null) {
+            if (tePair.size() != 2) {
+              throw new IllegalStateException();
+            }
+            if (!(tePair.get(0) instanceof Join)) {
+              tePair.get(0).visit(this);
+            }
+            if (!(tePair.get(1) instanceof Join)) {
+              tePair.get(1).visit(this);
+            }
+            if (tePair.get(1) instanceof Join) {
+              partialQuery = new Join(tePair.get(0), ((Join) tePair.get(1)).getLeftArg());
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(1)).getRightArg());
+              joinArgs.remove(tePair.get(0));
+              joinArgs.remove(tePair.get(1));
+            } else {
+              partialQuery = new Join(tePair.get(0), tePair.get(1));
+              joinArgs.remove(tePair.get(0));
+              joinArgs.remove(tePair.get(1));
+            }
+          } else {
+            if (tePair.size() != 1) {
+              throw new IllegalStateException();
+            }
+            if (!(tePair.get(0) instanceof Join)) {
+              tePair.get(0).visit(this);
+            }
+
+            if (tePair.get(0) instanceof Join) {
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getLeftArg());
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getRightArg());
+              joinArgs.remove(tePair.get(0));
+
+            } else {
+              partialQuery = new Join(partialQuery, tePair.get(0));
+              joinArgs.remove(tePair.get(0));
+            }
+          }
+
+        }
+
+        // Replace old join hierarchy
+        node.replaceWith(partialQuery);
+
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+    }
+
+    protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
+      if (tupleExpr instanceof Join) {
+        if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) {
+          Join join = (Join) tupleExpr;
+          getJoinArgs(join.getLeftArg(), joinArgs);
+          getJoinArgs(join.getRightArg(), joinArgs);
+        } else {
+          joinArgs.add(tupleExpr);
+        }
+      } else {
+        joinArgs.add(tupleExpr);
+      }
+
+      return joinArgs;
+    }
+
+    public TePairCost getBestTupleJoin(TupleExpr partialQuery, List<TupleExpr> teList) throws Exception {
+
+      double tempCost = 0;
+      double bestCost = Double.MAX_VALUE;
+      List<TupleExpr> bestJoinNodes = new ArrayList<TupleExpr>();
+
+      if (partialQuery == null) {
+
+        double jSelect = 0;
+        double card1 = 0;
+        double card2 = 0;
+        TupleExpr teMin1 = null;
+        TupleExpr teMin2 = null;
+        double bestCard1 = 0;
+        double bestCard2 = 0;
+
+        for (int i = 0; i < teList.size(); i++) {
+          for (int j = i + 1; j < teList.size(); j++) {
+            jSelect = eval.getJoinSelect(config, teList.get(i), teList.get(j));
+            card1 = statistics.getCardinality(teList.get(i));
+            card2 = statistics.getCardinality(teList.get(j));
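+            // Cost model: scan both inputs (card1 + card2) plus the estimated join output size (card1 * card2 * join selectivity).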
+            tempCost = card1 + card2 + card1 * card2 * jSelect;
+//             System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
+//             + jSelect + ", and nodes are "
+//             + teList.get(i) + " and " + teList.get(j));
+
+            // TODO: this generates a NullPointerException if tempCost = Double.MAX_VALUE
+            if (bestCost > tempCost) {
+
+              teMin1 = teList.get(i);
+              teMin2 = teList.get(j);
+              bestCard1 = card1;
+              bestCard2 = card2;
+              bestCost = tempCost;
+
+              if (bestCost == 0) {
+                bestJoinNodes.add(teMin1);
+                bestJoinNodes.add(teMin2);
+                return new TePairCost(0.0, bestJoinNodes);
+              }
+            }
+          }
+        }
+
+        if (bestCard1 < bestCard2) {
+
+          bestJoinNodes.add(teMin1);
+          bestJoinNodes.add(teMin2);
+
+        } else {
+          bestJoinNodes.add(teMin2);
+          bestJoinNodes.add(teMin1);
+        }
+        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + jSelect + ", and best cost is" + bestCost);
+        return new TePairCost(bestCost, bestJoinNodes);
+
+      } else {
+        double card1 = statistics.getCardinality(partialQuery);
+        TupleExpr bestTe = null;
+        double card2 = 0;
+        double select = 0;
+
+        for (TupleExpr te : teList) {
+          select = eval.getJoinSelect(config, partialQuery, te);
+          card2 = statistics.getCardinality(te);
+          tempCost = card1 + card2 + card1 * card2 * select;
+//          System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
+//                  + select + ", and nodes are "
+//                  + partialQuery + " and " + te);
+
+
+          if (bestCost > tempCost) {
+            bestTe = te;
+            bestCost = tempCost;
+          }
+
+        }
+        List<TupleExpr> teList2 = new ArrayList<TupleExpr>();
+        teList2.add(bestTe);
+        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + select + ", and best cost is" + bestCost);
+        return new TePairCost(bestCost, teList2);
+      }
+
+    }
+
+    // **************************************************************************************
+    public class TePairCost {
+
+      private double cost;
+      private List<TupleExpr> tePair;
+
+      public TePairCost(double cost, List<TupleExpr> tePair) {
+        this.cost = cost;
+        this.tePair = tePair;
+
+      }
+
+      public double getCost() {
+        return cost;
+      }
+
+      public List<TupleExpr> getTePair() {
+        return tePair;
+      }
+
+    }
+
+  }
+}
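
For reference, the pairwise cost minimized by getBestTupleJoin above is cost(a, b) = card(a) + card(b) + card(a) * card(b) * joinSelectivity(a, b). Below is a minimal, self-contained sketch of that search, with hypothetical cardinalities and selectivities standing in for the RdfEvalStatsDAO/SelectivityEvalDAO lookups made by the optimizer; it does not touch the Rya or Sesame APIs.

    /** Standalone illustration of the pairwise cost searched by getBestTupleJoin. */
    public class JoinCostSketch {

        // cost(a, b) = card(a) + card(b) + card(a) * card(b) * selectivity(a, b)
        static double cost(double card1, double card2, double selectivity) {
            return card1 + card2 + card1 * card2 * selectivity;
        }

        public static void main(String[] args) {
            // hypothetical cardinalities for three statement patterns
            double[] card = { 1000d, 50d, 200d };
            // hypothetical pairwise join selectivities (symmetric, diagonal unused)
            double[][] select = {
                { 0d,    0.01d, 0.5d },
                { 0.01d, 0d,    0.2d },
                { 0.5d,  0.2d,  0d   }
            };

            double bestCost = Double.MAX_VALUE;
            int bestI = -1, bestJ = -1;
            for (int i = 0; i < card.length; i++) {
                for (int j = i + 1; j < card.length; j++) {
                    double c = cost(card[i], card[j], select[i][j]);
                    if (c < bestCost) {
                        bestCost = c;
                        bestI = i;
                        bestJ = j;
                    }
                }
            }
            // like getBestTupleJoin, the smaller-cardinality pattern would be ordered first
            System.out.println("best pair: (" + bestI + "," + bestJ + ") cost=" + bestCost);
        }
    }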

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
new file mode 100644
index 0000000..b0fa46c
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
@@ -0,0 +1,281 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static com.google.common.base.Preconditions.checkNotNull;
+//import static RdfCloudTripleStoreUtils.getTtlValueConverter;
+
+
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.query.algebra.BinaryTupleOperator;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.Slice;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.UnaryTupleOperator;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+
+/**
+ * Class RdfCloudTripleStoreEvaluationStatistics
+ * Date: Apr 12, 2011
+ * Time: 1:31:05 PM
+ */
+public class RdfCloudTripleStoreEvaluationStatistics extends EvaluationStatistics {
+
+    private RdfCloudTripleStoreConfiguration conf;
+    private RdfEvalStatsDAO rdfEvalStatsDAO;
+    protected boolean pushEmptyRdfTypeDown = true;
+    protected boolean useCompositeCardinalities = true;
+
+    public RdfCloudTripleStoreEvaluationStatistics(RdfCloudTripleStoreConfiguration conf, RdfEvalStatsDAO rdfEvalStatsDAO) {
+        checkNotNull(conf);
+        checkNotNull(rdfEvalStatsDAO);
+        try {
+            this.conf = conf;
+            this.rdfEvalStatsDAO = rdfEvalStatsDAO;
+            pushEmptyRdfTypeDown = conf.isStatsPushEmptyRdftypeDown();
+            useCompositeCardinalities = conf.isUseCompositeCardinality();
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public double getCardinality(TupleExpr expr) {
+        if (expr instanceof Filter) {
+            Filter f = (Filter) expr;
+            // filters must make sets smaller
+            return super.getCardinality(f.getArg()) / 10;
+        }
+        return super.getCardinality(expr);
+    }
+
+    @Override
+    protected CardinalityCalculator createCardinalityCalculator() {
+        return new RdfCloudTripleStoreCardinalityCalculator(this);
+    }
+
+    public RdfEvalStatsDAO getRdfEvalStatsDAO() {
+        return rdfEvalStatsDAO;
+    }
+
+    public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) {
+        this.rdfEvalStatsDAO = rdfEvalStatsDAO;
+    }
+
+    public class RdfCloudTripleStoreCardinalityCalculator extends CardinalityCalculator {
+        private RdfCloudTripleStoreEvaluationStatistics statistics;
+        protected Map<Var, Collection<Statement>> fspMap;
+
+        public RdfCloudTripleStoreCardinalityCalculator(RdfCloudTripleStoreEvaluationStatistics statistics) {
+            this.statistics = statistics;
+        }
+        
+ 
+        @Override
+        protected double getCardinality(StatementPattern sp) {
+            Var subjectVar = sp.getSubjectVar();
+            Resource subj = (Resource) getConstantValue(subjectVar);
+            Var predicateVar = sp.getPredicateVar();
+            URI pred = (URI) getConstantValue(predicateVar);
+            Var objectVar = sp.getObjectVar();
+            Value obj = getConstantValue(objectVar);
+            Resource context = (Resource) getConstantValue(sp.getContextVar());
+
+            // give rdf:type a max cardinality (as long as the object/subject aren't specified) so it is evaluated last
+            if (pred != null) {
+                if (statistics.pushEmptyRdfTypeDown && RDF.TYPE.equals(pred) && subj == null && obj == null) {
+                    return Double.MAX_VALUE;
+                }
+            }
+
+            // a FixedStatementPattern indicates that backward-chaining reasoning is being used
+            if (sp instanceof FixedStatementPattern) {
+                //no query here
+                FixedStatementPattern fsp = (FixedStatementPattern) sp;
+                //TODO: assume that only the subject is open ended here
+                Var fspSubjectVar = fsp.getSubjectVar();
+                if (fspSubjectVar != null && fspSubjectVar.getValue() == null) {
+                    if (fspMap == null) {
+                        fspMap = new HashMap<Var, Collection<Statement>>();
+                    }
+                    fspMap.put(fspSubjectVar, fsp.statements);
+                }
+                return fsp.statements.size();
+            }
+
+            /**
+             * Use the output of the FixedStatementPattern to determine more information
+             */
+            if (fspMap != null && sp instanceof DoNotExpandSP) {
+                //TODO: Might be a better way than 3 map pulls
+                RdfEvalStatsDAO.CARDINALITY_OF cardinality_of = null;
+                Collection<Statement> statements = null;
+                // TODO unsure of how to incorporate additional cardinalities here
+                if (objectVar != null && objectVar.getValue() == null) {
+                    statements = fspMap.get(objectVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.OBJECT;
+                }
+                if (statements == null && predicateVar != null && predicateVar.getValue() == null) {
+                    statements = fspMap.get(predicateVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
+                }
+                if (statements == null && subjectVar != null && subjectVar.getValue() == null) {
+                    statements = fspMap.get(subjectVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
+                }
+                if (statements != null) {
+                    double fspCard = 0;
+                    for (Statement statement : statements) {
+                    	List<Value> values = new ArrayList<Value>();
+                    	values.add(statement.getSubject());
+                    	fspCard  += rdfEvalStatsDAO.getCardinality(conf, cardinality_of, values, context);
+                    }
+                    return fspCard;
+                }
+            }
+
+            /**
+             * We put full triple scans before rdf:type because more often than not
+             * the triple scan is being joined with something else that is better than
+             * asking the full rdf:type of everything.
+             */
+            double cardinality = Double.MAX_VALUE - 1;
+            try {
+                if (subj != null) {
+                	List<Value> values = new ArrayList<Value>();
+                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
+            		values.add(subj);
+            		if (useCompositeCardinalities){
+                   	    if (pred != null){
+                    		values.add(pred);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTPREDICATE;
+                    	}
+                   	    else if (obj != null){
+                    		values.add(obj);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTOBJECT;
+                   	    }
+            		}
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
+                	// the cardinality will be -1 if there was no value found (if the index does not exist)
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+                else if (pred != null) {
+                	List<Value> values = new ArrayList<Value>();
+                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
+            		values.add(pred);
+            		if (useCompositeCardinalities){
+                   	    if (obj != null){
+                    		values.add(obj);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT;
+                   	    }
+            		}
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+                else if (obj != null) {
+                	List<Value> values = new ArrayList<Value>();
+            		values.add(obj);
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values, context);
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+
+            return cardinality;
+        }
+
+        @Override
+        protected void meetUnaryTupleOperator(UnaryTupleOperator node) {
+            if (node instanceof Projection) {
+                cardinality -= 1.0;
+            }
+            super.meetUnaryTupleOperator(node);
+        }
+
+        @Override
+        protected void meetBinaryTupleOperator(BinaryTupleOperator node) {
+            node.getLeftArg().visit(this);
+            double leftArgCost = cardinality;
+            node.getRightArg().visit(this);
+            cardinality += leftArgCost;
+        }
+        
+        // TODO: is this sufficient to add support for the Slice node?
+        @Override
+        public void meet(Slice node) {
+            cardinality = node.getLimit();
+        }
+        
+
+        @Override
+        public void meet(Join node) {
+            node.getLeftArg().visit(this);
+            double leftArgCost = cardinality;
+            node.getRightArg().visit(this);
+            if (leftArgCost > cardinality) {
+                cardinality = leftArgCost;    //TODO: Is this ok?
+            }
+        }
+
+        protected Value getConstantValue(Var var) {
+            if (var != null)
+                return var.getValue();
+            else
+                return null;
+        }
+    }
+
+}
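
The calculator above prefers the most specific index it can use, falling back from composite cardinalities (SUBJECTPREDICATE, SUBJECTOBJECT, PREDICATEOBJECT) to single-position ones when useCompositeCardinalities is off or the extra position is unbound. Below is a standalone sketch of that selection order, with plain strings standing in for openrdf Values and a local enum mirroring RdfEvalStatsDAO.CARDINALITY_OF; it is an illustration only, not the Rya API.

    /** Standalone sketch of the index selection in getCardinality(StatementPattern). */
    public class CardinalityKeySketch {

        enum CardinalityOf { SUBJECT, SUBJECTPREDICATE, SUBJECTOBJECT,
                             PREDICATE, PREDICATEOBJECT, OBJECT }

        // null means that position of the pattern is an unbound variable
        static CardinalityOf pickIndex(String subj, String pred, String obj,
                                       boolean useCompositeCardinalities) {
            if (subj != null) {
                if (useCompositeCardinalities && pred != null) {
                    return CardinalityOf.SUBJECTPREDICATE;
                } else if (useCompositeCardinalities && obj != null) {
                    return CardinalityOf.SUBJECTOBJECT;
                }
                return CardinalityOf.SUBJECT;
            } else if (pred != null) {
                if (useCompositeCardinalities && obj != null) {
                    return CardinalityOf.PREDICATEOBJECT;
                }
                return CardinalityOf.PREDICATE;
            } else if (obj != null) {
                return CardinalityOf.OBJECT;
            }
            return null; // fully unbound pattern: no index lookup applies
        }

        public static void main(String[] args) {
            System.out.println(pickIndex("urn:s", "urn:p", null, true));   // SUBJECTPREDICATE
            System.out.println(pickIndex(null, "urn:p", "urn:o", false));  // PREDICATE
            System.out.println(pickIndex(null, null, "urn:o", true));      // OBJECT
        }
    }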

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
new file mode 100644
index 0000000..7c88640
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
@@ -0,0 +1,128 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+
+public class RdfCloudTripleStoreSelectivityEvaluationStatistics extends RdfCloudTripleStoreEvaluationStatistics {
+
+  // allows access to join selectivity and extending RdfCloudTripleStoreEvaluationStatistics allows for use of prospector
+  private SelectivityEvalDAO selectEvalStatsDAO; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains
+                                                 // RdfEvalStatsDAO object
+
+  protected double filterCard;
+  RdfCloudTripleStoreConfiguration config; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains conf as well
+
+  public RdfCloudTripleStoreSelectivityEvaluationStatistics(RdfCloudTripleStoreConfiguration conf,
+      RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> prospector, SelectivityEvalDAO selectEvalStatsDAO) {
+
+    super(conf, prospector);
+    checkNotNull(selectEvalStatsDAO);
+
+    try {
+      this.selectEvalStatsDAO = selectEvalStatsDAO;
+      this.config = conf; // TODO fix this!
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected CardinalityCalculator createCardinalityCalculator() {
+    try {
+      return new SelectivityCardinalityCalculator(this);
+    } catch (Exception e) {
+      System.out.println(e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  public class SelectivityCardinalityCalculator extends RdfCloudTripleStoreCardinalityCalculator {
+
+    public SelectivityCardinalityCalculator(RdfCloudTripleStoreSelectivityEvaluationStatistics statistics) {
+      super(statistics);
+    }
+
+    @Override
+    public void meet(Join node) {
+      node.getLeftArg().visit(this);
+      double leftArgCost = cardinality;
+      // System.out.println("Left cardinality is " + cardinality);
+      node.getRightArg().visit(this);
+
+      if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
+        return;
+      }
+
+      try {
+        double selectivity = selectEvalStatsDAO.getJoinSelect(config, node.getLeftArg(), node.getRightArg());
+//        System.out.println("CardCalc: left cost of " + node.getLeftArg() + " is " + leftArgCost + " right cost of "
+//        + node.getRightArg() + " is " + cardinality);
+//         System.out.println("Right cardinality is " + cardinality);
+        cardinality += leftArgCost + leftArgCost * cardinality * selectivity;
+//        System.out.println("CardCalc: Cardinality is " + cardinality);
+//        System.out.println("CardCalc: Selectivity is " + selectivity);
+        // System.out.println("Join cardinality is " + cardinality);
+
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+
+    }
+
+    @Override
+    public double getCardinality(StatementPattern node) {
+
+      cardinality = super.getCardinality(node);
+
+      // if the pattern consists entirely of variables or is an rdf:type pattern with no
+      // subject/object bound, assign a cardinality equal to the table size
+      if (cardinality == Double.MAX_VALUE || cardinality == Double.MAX_VALUE - 1) {
+        try {
+          cardinality = selectEvalStatsDAO.getTableSize(config);
+        } catch (Exception e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+
+      return cardinality;
+    }
+
+  }
+
+}
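
meet(Join) above folds the estimate up the plan as cardinality = left + right + left * right * selectivity, and getCardinality falls back to the table size for fully unbound or bare rdf:type patterns. Below is a small sketch of how that estimate propagates up a left-deep plan, using hypothetical numbers rather than real SelectivityEvalDAO lookups.

    /** Standalone sketch of the estimate folded up by SelectivityCardinalityCalculator.meet(Join). */
    public class JoinEstimateSketch {

        // same shape as: cardinality += leftArgCost + leftArgCost * cardinality * selectivity
        static double joinCard(double left, double right, double selectivity) {
            return left + right + left * right * selectivity;
        }

        public static void main(String[] args) {
            double spA = 500d, spB = 40d, spC = 2000d;  // hypothetical pattern cardinalities
            double selAB = 0.05d, selABxC = 0.001d;     // hypothetical join selectivities

            double ab  = joinCard(spA, spB, selAB);     // estimate for join(A, B)
            double abc = joinCard(ab, spC, selABxC);    // estimate for join(join(A, B), C)

            System.out.println("join(A, B)          = " + ab);
            System.out.println("join(join(A, B), C) = " + abc);
        }
    }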

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
new file mode 100644
index 0000000..f825921
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
@@ -0,0 +1,70 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * Class ReorderJoinVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class ReorderJoinVisitor extends QueryModelVisitorBase {
+    @Override
+    public void meet(Join node) throws Exception {
+        super.meet(node);
+        
+        TupleExpr leftArg = node.getLeftArg();
+        TupleExpr rightArg = node.getRightArg();
+
+        /**
+         * if join(stmtPattern1, join(stmtPattern2, anything))
+         * Should be
+         * join(join(stmtPattern1, stmtPattern2), anything)
+         */
+        if (leftArg instanceof StatementPattern && rightArg instanceof Join) {
+            Join rightJoin = (Join) rightArg;
+            //find the stmtPattern in the right side
+            TupleExpr right_LeftArg = rightJoin.getLeftArg();
+            TupleExpr right_rightArg = rightJoin.getRightArg();
+            if (right_LeftArg instanceof StatementPattern || right_rightArg instanceof StatementPattern) {
+                StatementPattern stmtPattern = null;
+                TupleExpr anything = null;
+                if (right_LeftArg instanceof StatementPattern) {
+                    stmtPattern = (StatementPattern) right_LeftArg;
+                    anything = right_rightArg;
+                } else {
+                    stmtPattern = (StatementPattern) right_rightArg;
+                    anything = right_LeftArg;
+                }
+
+                Join inner = new Join(leftArg, stmtPattern);
+                Join outer = new Join(inner, anything);
+                node.replaceWith(outer);
+            }
+        }
+
+    }
+}
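
The rotation performed by ReorderJoinVisitor turns join(stmtPattern1, join(stmtPattern2, anything)) into join(join(stmtPattern1, stmtPattern2), anything). Below is a hand-built sketch of the before and after shapes, assuming the Sesame algebra constructors used elsewhere in this patch; the nodes are cloned for the second tree so each tree owns its children.

    import org.openrdf.query.algebra.Join;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.TupleExpr;
    import org.openrdf.query.algebra.Var;

    /** Hand-built before/after shapes of the rotation performed by ReorderJoinVisitor. */
    public class ReorderSketch {
        public static void main(String[] args) {
            StatementPattern sp1 = new StatementPattern(new Var("s"), new Var("p1"), new Var("o1"));
            StatementPattern sp2 = new StatementPattern(new Var("s"), new Var("p2"), new Var("o2"));
            TupleExpr anything = new StatementPattern(new Var("x"), new Var("y"), new Var("z"));

            // before: join(stmtPattern1, join(stmtPattern2, anything))
            Join before = new Join(sp1, new Join(sp2, anything));

            // after the rotation: join(join(stmtPattern1, stmtPattern2), anything)
            Join after = new Join(new Join(sp1.clone(), sp2.clone()), anything.clone());

            System.out.println("before:\n" + before);
            System.out.println("after:\n" + after);
        }
    }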

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
new file mode 100644
index 0000000..002b804
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
@@ -0,0 +1,55 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * TODO: This might be a very bad thing. It may force all ANDs and not allow ORs; it depends on how the bindings are handled.
+ * Class SeparateFilterJoinsVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class SeparateFilterJoinsVisitor extends QueryModelVisitorBase {
+    @Override
+    public void meet(Filter node) throws Exception {
+        super.meet(node);
+
+        ValueExpr condition = node.getCondition();
+        TupleExpr arg = node.getArg();
+        if (!(arg instanceof Join)) {
+            return;
+        }
+
+        Join join = (Join) arg;
+        TupleExpr leftArg = join.getLeftArg();
+        TupleExpr rightArg = join.getRightArg();
+
+        if (leftArg instanceof StatementPattern && rightArg instanceof StatementPattern) {
+            Filter left = new Filter(leftArg, condition);
+            Filter right = new Filter(rightArg, condition);
+            node.replaceWith(new Join(left, right));
+        }
+
+    }
+}
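
SeparateFilterJoinsVisitor pushes a filter over a join of two statement patterns onto both sides, i.e. Filter(condition, Join(sp1, sp2)) becomes Join(Filter(sp1, condition), Filter(sp2, condition)). Below is a hand-built sketch of the two shapes, assuming the Sesame algebra constructors used elsewhere in this patch; the condition here is an arbitrary Compare chosen only for illustration.

    import org.openrdf.query.algebra.Compare;
    import org.openrdf.query.algebra.Filter;
    import org.openrdf.query.algebra.Join;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.ValueExpr;
    import org.openrdf.query.algebra.Var;

    /** Hand-built before/after shapes of the rewrite performed by SeparateFilterJoinsVisitor. */
    public class SeparateFilterSketch {
        public static void main(String[] args) {
            StatementPattern sp1 = new StatementPattern(new Var("s"), new Var("p1"), new Var("o"));
            StatementPattern sp2 = new StatementPattern(new Var("s"), new Var("p2"), new Var("o2"));
            ValueExpr condition = new Compare(new Var("o"), new Var("o2")); // defaults to EQ

            // before: Filter(condition, Join(sp1, sp2))
            Filter before = new Filter(new Join(sp1, sp2), condition);

            // after: Join(Filter(sp1, condition), Filter(sp2, condition))
            Join after = new Join(new Filter(sp1.clone(), condition.clone()),
                                  new Filter(sp2.clone(), condition.clone()));

            System.out.println("before:\n" + before);
            System.out.println("after:\n" + after);
        }
    }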

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
new file mode 100644
index 0000000..f6d3ff0
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
@@ -0,0 +1,108 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Union;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Class AbstractInferVisitor
+ * Date: Mar 14, 2012
+ * Time: 5:33:01 PM
+ */
+public class AbstractInferVisitor extends QueryModelVisitorBase {
+
+    static Var EXPANDED = new Var("infer-expanded");
+
+    boolean include = true;
+
+    RdfCloudTripleStoreConfiguration conf;
+    InferenceEngine inferenceEngine;
+
+    public AbstractInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        checkNotNull(conf, "Configuration cannot be null");
+        checkNotNull(inferenceEngine, "Inference Engine cannot be null");
+        this.conf = conf;
+        this.inferenceEngine = inferenceEngine;
+    }
+
+    @Override
+    public void meet(StatementPattern sp) throws Exception {
+        if (!include) {
+            return;
+        }
+        if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) {
+            return;   //already inferred somewhere else
+        }
+        final Var predVar = sp.getPredicateVar();
+        // we do not let timeRange predicates be inferred; that would not be good
+        if (predVar == null || predVar.getValue() == null
+//                || RdfCloudTripleStoreUtils.getTtlValueConverter(conf, (URI) predVar.getValue()) != null
+                ) {
+            return;
+        }
+        meetSP(sp);
+    }
+
+    protected void meetSP(StatementPattern sp) throws Exception {
+
+    }
+
+    @Override
+    public void meet(Union node) throws Exception {
+//        if (!(node instanceof InferUnion))
+        super.meet(node);
+    }
+
+    @Override
+    public void meet(Join node) throws Exception {
+        if (!(node instanceof InferJoin)) {
+            super.meet(node);
+        }
+    }
+
+    public RdfCloudTripleStoreConfiguration getConf() {
+        return conf;
+    }
+
+    public void setConf(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    public InferenceEngine getInferenceEngine() {
+        return inferenceEngine;
+    }
+
+    public void setInferenceEngine(InferenceEngine inferenceEngine) {
+        this.inferenceEngine = inferenceEngine;
+    }
+}
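
Concrete infer visitors are expected to subclass AbstractInferVisitor and override meetSP, leaving the guard logic in meet(StatementPattern) to the base class. Below is a hypothetical subclass sketch; ExampleInferVisitor is not part of this patch and only illustrates the intended extension point.

    package mvm.rya.rdftriplestore.inference;

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import org.openrdf.query.algebra.StatementPattern;

    /** Hypothetical subclass showing how an expander plugs into AbstractInferVisitor. */
    public class ExampleInferVisitor extends AbstractInferVisitor {

        public ExampleInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
            super(conf, inferenceEngine);
        }

        @Override
        protected void meetSP(StatementPattern sp) throws Exception {
            // a real expander would consult inferenceEngine here and replace sp with an
            // expanded node, e.g. an InferJoin of a FixedStatementPattern and a DoNotExpandSP,
            // so that later visitors skip the already-expanded pattern
            System.out.println("would expand: " + sp);
        }
    }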

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
new file mode 100644
index 0000000..aed7ed0
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
@@ -0,0 +1,51 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Class DoNotExpandSP
+ * Date: Mar 15, 2012
+ * Time: 9:39:45 AM
+ */
+public class DoNotExpandSP extends StatementPattern{
+    public DoNotExpandSP() {
+    }
+
+    public DoNotExpandSP(Var subject, Var predicate, Var object) {
+        super(subject, predicate, object);
+    }
+
+    public DoNotExpandSP(Scope scope, Var subject, Var predicate, Var object) {
+        super(scope, subject, predicate, object);
+    }
+
+    public DoNotExpandSP(Var subject, Var predicate, Var object, Var context) {
+        super(subject, predicate, object, context);
+    }
+
+    public DoNotExpandSP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
+        super(scope, subjVar, predVar, objVar, conVar);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
new file mode 100644
index 0000000..aa0b99b
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
@@ -0,0 +1,34 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+/**
+ * Interface InferConstants
+ * Date: Apr 16, 2011
+ * Time: 7:30:47 AM
+ */
+public interface InferConstants {
+
+    public static final String INFERRED = "inferred";
+    public static final String TRUE = "true";
+    public static final String FALSE = "false";
+}