Posted to commits@jena.apache.org by an...@apache.org on 2017/10/03 19:33:58 UTC

[12/65] [abbrv] jena git commit: JENA-1397: Rename java packages

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/OpExecutorTDB1.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/OpExecutorTDB1.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/OpExecutorTDB1.java
deleted file mode 100644
index 8ab52a8..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/OpExecutorTDB1.java
+++ /dev/null
@@ -1,428 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.function.Predicate;
-
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.atlas.logging.Log ;
-import org.apache.jena.graph.Graph ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.sparql.ARQInternalErrorException ;
-import org.apache.jena.sparql.algebra.Op ;
-import org.apache.jena.sparql.algebra.op.* ;
-import org.apache.jena.sparql.algebra.optimize.TransformFilterPlacement ;
-import org.apache.jena.sparql.core.BasicPattern ;
-import org.apache.jena.sparql.core.Quad ;
-import org.apache.jena.sparql.core.Substitute ;
-import org.apache.jena.sparql.core.Var ;
-import org.apache.jena.sparql.engine.ExecutionContext ;
-import org.apache.jena.sparql.engine.QueryIterator ;
-import org.apache.jena.sparql.engine.iterator.QueryIterPeek ;
-import org.apache.jena.sparql.engine.main.OpExecutor ;
-import org.apache.jena.sparql.engine.main.OpExecutorFactory ;
-import org.apache.jena.sparql.engine.main.QC ;
-import org.apache.jena.sparql.engine.main.iterator.QueryIterGraph ;
-import org.apache.jena.sparql.engine.optimizer.reorder.ReorderProc ;
-import org.apache.jena.sparql.engine.optimizer.reorder.ReorderTransformation ;
-import org.apache.jena.sparql.expr.ExprList ;
-import org.apache.jena.sparql.mgt.Explain ;
-import org.seaborne.tdb2.store.DatasetGraphTDB ;
-import org.seaborne.tdb2.store.GraphTDB ;
-import org.seaborne.tdb2.store.NodeId ;
-import org.slf4j.Logger ;
-import org.slf4j.LoggerFactory ;
-
-/** TDB executor for algebra expressions.  It is the standard ARQ executor
- *  except for basic graph patterns and filtered basic graph patterns (currently).
- *
- *  See also: StageGeneratorDirectTDB, a non-reordering StageGenerator.
- */
-public class OpExecutorTDB1 extends OpExecutor
-{
-    private static final Logger log = LoggerFactory.getLogger(OpExecutorTDB1.class) ;
-    
-    public final static OpExecutorFactory OpExecFactoryTDB = new OpExecutorFactory()
-    {
-        @Override
-        public OpExecutor create(ExecutionContext execCxt)
-        { return new OpExecutorTDB1(execCxt) ; }
-    } ;
-    
-    private final boolean isForTDB ;
-    
-    // A new compile object is created for each op compilation.
-    // So the execCxt is changing as we go through the query-compile-execute process  
-    public OpExecutorTDB1(ExecutionContext execCxt)
-    {
-        super(execCxt) ;
-        // NB. The dataset may be a TDB one, or a general one.
-        // Any merged union graph magic (for a TDB dataset) was handled
-        // in QueryEngineTDB.
-        
-        isForTDB = (execCxt.getActiveGraph() instanceof GraphTDB) ;
-    }
-
-    @Override
-    protected QueryIterator exec(Op op, QueryIterator input) {
-        if ( level < 0 )
-            // Print only at top level (and we're called before level++) 
-            Explain.explain("TDB", op, super.execCxt.getContext()) ;
-        return super.exec(op, input) ;
-    } 
-    
-    // Retrieving nodes isn't so bad because they will be needed anyway.
-    // And if they are duplicates, they are likely to be cached.
-    // Need to work with SolverLib, which wraps the NodeId bindings with a converter.
-    
-    @Override
-    protected QueryIterator execute(OpDistinct opDistinct, QueryIterator input)
-    {
-        return super.execute(opDistinct, input) ;
-    }
-    
-    @Override
-    protected QueryIterator execute(OpReduced opReduced, QueryIterator input)
-    {
-        return super.execute(opReduced, input) ;
-    }
-    
-    @Override
-    protected QueryIterator execute(OpFilter opFilter, QueryIterator input)
-    {
-        if ( ! isForTDB )
-            return super.execute(opFilter, input) ;
-        
-        // If the filter does not apply to the input??
-        // Where does ARQ catch this?
-        
-        // (filter (bgp ...))
-        if ( OpBGP.isBGP(opFilter.getSubOp()) )
-        {
-            // Still may be a TDB graph in a non-TDB dataset (e.g. a named model)
-            GraphTDB graph = (GraphTDB)execCxt.getActiveGraph() ;
-            OpBGP opBGP = (OpBGP)opFilter.getSubOp() ;
-            return executeBGP(graph, opBGP, input, opFilter.getExprs(), execCxt) ;
-        }
-        
-        // (filter (quadpattern ...))
-        if ( opFilter.getSubOp() instanceof OpQuadPattern )
-        {
-            OpQuadPattern quadPattern = (OpQuadPattern)opFilter.getSubOp() ;
-            DatasetGraphTDB ds = (DatasetGraphTDB)execCxt.getDataset() ;
-            return optimizeExecuteQuads(ds, input,
-                                        quadPattern.getGraphNode(), quadPattern.getBasicPattern(),
-                                        opFilter.getExprs(), execCxt) ;
-        }
-    
-        // (filter (anything else))
-        return super.execute(opFilter, input) ;
-        }
-
-    // ---- Triple patterns
-    
-    @Override
-    protected QueryIterator execute(OpBGP opBGP, QueryIterator input)
-    {
-        if ( ! isForTDB )
-            return super.execute(opBGP, input) ;
-        
-        GraphTDB graph = (GraphTDB)execCxt.getActiveGraph() ;
-        return executeBGP(graph, opBGP, input, null, execCxt) ;
-       
-    }
-
-    @Override
-    protected QueryIterator execute(OpQuadPattern quadPattern, QueryIterator input)
-    {
-        if ( ! isForTDB )
-            return super.execute(quadPattern, input) ;
-            
-    //        DatasetGraph dg = execCxt.getDataset() ;
-    //        if ( ! ( dg instanceof DatasetGraphTDB ) )
-    //            throw new InternalErrorException("Not a TDB backed dataset in quad pattern execution") ;
-        
-        DatasetGraphTDB ds = (DatasetGraphTDB)execCxt.getDataset() ;
-        BasicPattern bgp = quadPattern.getBasicPattern() ;
-        Node gn = quadPattern.getGraphNode() ;
-        return optimizeExecuteQuads(ds, input, gn, bgp, null, execCxt) ;
-    }
-
-    @Override
-    protected QueryIterator execute(OpGraph opGraph, QueryIterator input)
-    {
-        // Path evaluation or datasets which do not go straight to the DatasetGraphTDB
-        return new QueryIterGraph(input, opGraph, execCxt) ;
-    }
-
-    /** Execute a BGP (and filters) on a TDB graph, which may be in default storage or it may be a named graph */ 
-    private static QueryIterator executeBGP(GraphTDB graph, OpBGP opBGP, QueryIterator input, ExprList exprs, 
-                                            ExecutionContext execCxt)
-    {
-        // Is it the real default graph (normal route or explicitly named)?
-        if ( ! isDefaultGraphStorage(graph.getGraphName()))
-        {
-            // Not default storage - it's a named graph in storage. 
-            DatasetGraphTDB ds = graph.getDSG() ;
-            return optimizeExecuteQuads(ds, input, graph.getGraphName(), opBGP.getPattern(), exprs, execCxt) ;
-        }
-        
-        // Execute a BGP on the real default graph
-        return optimizeExecuteTriples(graph, input, opBGP.getPattern(), exprs, execCxt) ;
-    }
-
-    /** Execute, with optimization, a basic graph pattern on the default graph storage */
-    private static QueryIterator optimizeExecuteTriples(GraphTDB graph, QueryIterator input,
-                                                        BasicPattern pattern, ExprList exprs,
-                                                        ExecutionContext execCxt)
-    {
-        if ( ! input.hasNext() )
-            return input ;
-    
-        // -- Input
-        // Must pass this iterator into the next stage.
-        if ( pattern.size() >= 2 )
-        {
-            // Must be 2 or more triples to reorder.
-            ReorderTransformation transform = graph.getDSG().getReorderTransform() ;
-            if ( transform != null )
-            {
-                QueryIterPeek peek = QueryIterPeek.create(input, execCxt) ;
-                input = peek ; // Must pass on
-                pattern = reorder(pattern, peek, transform) ;
-            }
-        }
-        // -- Filter placement
-            
-        Op op = null ;
-        if ( exprs != null )
-            op = TransformFilterPlacement.transform(exprs, pattern) ;
-        else
-            op = new OpBGP(pattern) ;
-        
-        return plainExecute(op, input, execCxt) ;
-    }
-
-    /** Execute, with optimization, a quad pattern */
-    private static QueryIterator optimizeExecuteQuads(DatasetGraphTDB ds, 
-                                                      QueryIterator input, 
-                                                      Node gn, BasicPattern bgp,
-                                                      ExprList exprs, ExecutionContext execCxt)
-    {
-        if ( ! input.hasNext() )
-            return input ;
-
-        // ---- Graph names with special meaning. 
-
-        gn = decideGraphNode(gn, execCxt) ;
-        if ( gn == null )
-            return optimizeExecuteTriples(ds.getDefaultGraphTDB(), input, bgp, exprs, execCxt) ;
-        
-        // ---- Execute quads+filters
-        if ( bgp.size() >= 2 )
-        {
-            ReorderTransformation transform = ds.getReorderTransform() ;
-    
-            if ( transform != null )
-            {
-                QueryIterPeek peek = QueryIterPeek.create(input, execCxt) ;
-                input = peek ; // Original input now invalid.
-                bgp = reorder(bgp, peek, transform) ;
-            }
-        }
-        // -- Filter placement
-        Op op = null ;
-        if ( exprs != null )
-            op = TransformFilterPlacement.transform(exprs, gn, bgp) ;
-        else
-            op = new OpQuadPattern(gn, bgp) ;
-
-        return plainExecute(op, input, execCxt) ;
-    }
-
-    /** Execute without modification of the op - does <b>not</b> apply special graph name translations */ 
-    private static QueryIterator plainExecute(Op op, QueryIterator input, ExecutionContext execCxt)
-    {
-        // -- Execute
-        // Switch to a non-reordering executor
-        // The Op may be a sequence due to TransformFilterPlacement
-        // so we need to do a full execution step, not go straight to the SolverLib.
-        
-        ExecutionContext ec2 = new ExecutionContext(execCxt) ;
-        ec2.setExecutor(plainFactory) ;
-
-        // Solve without going through this executor again.
-        // There could be issues with nested patterns, but this is only a
-        // (filter (bgp...)) or (filter (quadpattern ...)), or a sequence of these,
-        // so there are no nested patterns to reorder.
-        return QC.execute(op, input, ec2) ;
-    }
-
-    private static BasicPattern reorder(BasicPattern pattern, QueryIterPeek peek, ReorderTransformation transform)
-    {
-        if ( transform != null )
-        {
-            // This works by getting one result from the peek iterator,
-            // and creating the more grounded BGP. The transform is used to
-            // determine the best order and the transformation is returned. This
-            // transform is applied to the unsubstituted pattern (which will be
-            // substituted as part of evaluation).
-            
-            if ( ! peek.hasNext() )
-                throw new ARQInternalErrorException("Peek iterator is already empty") ;
- 
-            BasicPattern pattern2 = Substitute.substitute(pattern, peek.peek() ) ;
-            // Calculate the reordering based on the substituted pattern.
-            ReorderProc proc = transform.reorderIndexes(pattern2) ;
-            // Then reorder the original pattern.
-            pattern = proc.reorder(pattern) ;
-        }
-        return pattern ;
-    }
-    
-    /** Handle special graph node names.
-     * Returns null for the default graph in storage (use the triple table).
-     * Returns Node.ANY for the union graph.
-     */
-    public static Node decideGraphNode(Node gn, ExecutionContext execCxt)
-    {
-     // ---- Graph names with special meaning. 
-    
-        // Graph names with special meaning:
-        //   Quad.defaultGraphIRI -- the IRI used in GRAPH <> to mean the default graph.
-        //   Quad.defaultGraphNodeGenerated -- the internal marker node used for the quad form of queries.
-        //   Quad.unionGraph -- the IRI used in GRAPH <> to mean the union of named graphs
-    
-        if ( isDefaultGraphStorage(gn) ) 
-        {
-            // Storage concrete, default graph. 
-            // Either outside GRAPH (no implicit union)
-            // or using the "name" of the default graph
-            return null ;
-        }
-
-        // Not default storage graph.
-        // ---- Union (RDF Merge) of named graphs
-
-        if ( Quad.isUnionGraph(gn) ) 
-            return Node.ANY ;
-        boolean doingUnion = false ;
-        
-        return gn ;
-    }
-
-    // Is this a query against the real default graph in the storage (in a 3-tuple table). 
-    private static boolean isDefaultGraphStorage(Node gn)
-    {
-        if ( gn == null )
-            return true ;
-        
-        // Is it the implicit name for default graph.
-        if ( Quad.isDefaultGraph(gn) )
-            // Not accessing the union of named graphs as the default graph
-            // and pattern is directed to the default graph.
-            return true ;
-    
-        return false ;
-    }
-    
-    @Override
-    protected QueryIterator execute(OpDatasetNames dsNames, QueryIterator input)
-    { 
-        DatasetGraphTDB ds = (DatasetGraphTDB)execCxt.getDataset() ;
-        Predicate<Tuple<NodeId>> filter = QC2.getFilter(execCxt.getContext()) ;
-        Node gn = dsNames.getGraphNode() ;
-        if ( Var.isVar(gn) )
-            return SolverLib.graphNames(ds, dsNames.getGraphNode(), input, filter, execCxt) ;
-        else
-            return SolverLib.testForGraphName(ds, dsNames.getGraphNode(), input, filter, execCxt) ;
-    }
-
-    // ---- OpExecute factories and plain executor.
-    
-    private static OpExecutorFactory plainFactory = new OpExecutorPlainFactoryTDB() ;
-    private static class OpExecutorPlainFactoryTDB implements OpExecutorFactory
-    {
-        @Override
-        public OpExecutor create(ExecutionContext execCxt)
-        {
-            return new OpExecutorPlainTDB(execCxt) ;
-        }
-    }
-
-    /** An op executor that simply executes a BGP or QuadPattern without any reordering */ 
-    private static class OpExecutorPlainTDB extends OpExecutor
-    {
-        Predicate<Tuple<NodeId>> filter = null ;
-        
-        public OpExecutorPlainTDB(ExecutionContext execCxt)
-        {
-            super(execCxt) ;
-            filter = QC2.getFilter(execCxt.getContext()) ;
-        }
-        
-        @Override
-        public QueryIterator execute(OpBGP opBGP, QueryIterator input)
-        {
-            Graph g = execCxt.getActiveGraph() ;
-            
-            if ( g instanceof GraphTDB )
-            {
-                BasicPattern bgp = opBGP.getPattern() ;
-                Explain.explain("Execute", bgp, execCxt.getContext()) ;
-                // Triple-backed (but may be named as explicit default graph).
-                //return SolverLib.execute((GraphTDB)g, bgp, input, filter, execCxt) ;
-                GraphTDB gtdb = (GraphTDB)g ;
-                Node gn = decideGraphNode(gtdb.getGraphName(), execCxt) ;
-                return SolverLib.execute(gtdb.getDSG(), gn, bgp, input, filter, execCxt) ;
-            }
-            Log.warn(this, "Non-GraphTDB passed to OpExecutorPlainTDB") ;
-            return super.execute(opBGP, input) ;
-        }
-        
-        @Override
-        public QueryIterator execute(OpQuadPattern opQuadPattern, QueryIterator input)
-        {
-            Node gn = opQuadPattern.getGraphNode() ;
-            gn = decideGraphNode(gn, execCxt) ;
-            
-            if ( execCxt.getDataset() instanceof DatasetGraphTDB )
-            {
-                DatasetGraphTDB ds = (DatasetGraphTDB)execCxt.getDataset() ;
-                Explain.explain("Execute", opQuadPattern.getPattern(), execCxt.getContext()) ;
-                BasicPattern bgp = opQuadPattern.getBasicPattern() ;
-                return SolverLib.execute(ds, gn, bgp, input, filter, execCxt) ;
-            }
-            // Maybe a TDB named graph inside a non-TDB dataset.
-            Graph g = execCxt.getActiveGraph() ;
-            if ( g instanceof GraphTDB )
-            {
-                // Triples graph from TDB (which is the default graph of the dataset),
-                // used as a named graph in a composite dataset.
-                BasicPattern bgp = opQuadPattern.getBasicPattern() ;
-                Explain.explain("Execute", bgp, execCxt.getContext()) ;
-                // Don't pass in G -- gn may be different.
-                return SolverLib.execute(((GraphTDB)g).getDSG(), gn, bgp, input, filter, execCxt) ;
-            }
-            Log.warn(this, "Non-DatasetGraphTDB passed to OpExecutorPlainTDB") ;
-            return super.execute(opQuadPattern, input) ;
-        }
-
-    }
-}
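
The OpExecFactoryTDB field at the top of this class is how the executor gets plugged in: ARQ's QC utility looks up an OpExecutorFactory on the execution context. A minimal sketch of that wiring, assuming the pre-rename org.seaborne.tdb2 classes shown in this diff are on the classpath (illustration only, not part of this commit):

    import org.apache.jena.query.ARQ;
    import org.apache.jena.sparql.engine.main.QC;
    import org.seaborne.tdb2.solver.OpExecutorTDB1;

    public class WireOpExecutor {
        public static void main(String[] args) {
            // QC.execute(...) consults the factory registered on the context,
            // so installing OpExecFactoryTDB routes BGP/quad evaluation through
            // OpExecutorTDB1 for TDB-backed graphs.
            QC.setFactory(ARQ.getContext(), OpExecutorTDB1.OpExecFactoryTDB);
        }
    }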

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QC2.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QC2.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QC2.java
deleted file mode 100644
index d3b9db5..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QC2.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.function.Predicate;
-
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.sparql.util.Context ;
-import org.seaborne.tdb2.TDBException ;
-import org.seaborne.tdb2.store.NodeId ;
-import org.seaborne.tdb2.sys.SystemTDB ;
-
-public class QC2
-{
-    public static Predicate<Tuple<NodeId>> getFilter(Context context)
-    {
-        Object x = context.get(SystemTDB.symTupleFilter) ;
-
-        try {
-            @SuppressWarnings("unchecked")
-            Predicate<Tuple<NodeId>> f = (Predicate<Tuple<NodeId>>)x ;
-            return f ;
-        } catch (ClassCastException ex)
-        {
-            throw new TDBException("Not a Filter<Tuple<NodeId>>:"+x, ex) ;
-        }
-    }
-
-    public static void setFilter(Context context, Predicate<Tuple<NodeId>> filter)
-    {
-        context.set(SystemTDB.symTupleFilter, filter) ;
-    }
-}
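
QC2 is a small accessor pair over the context symbol SystemTDB.symTupleFilter: setFilter stores a Predicate over Tuple<NodeId> and the solver (StageMatchTuple, further down) applies it to every matched tuple. A hedged sketch of installing such a filter, again with the pre-rename package names; the NodeId to hide is whatever the caller supplies:

    import java.util.function.Predicate;

    import org.apache.jena.atlas.lib.tuple.Tuple;
    import org.apache.jena.sparql.util.Context;
    import org.seaborne.tdb2.solver.QC2;
    import org.seaborne.tdb2.store.NodeId;

    public class TupleFilterExample {
        /** Hide every matched tuple that mentions the given NodeId in any slot. */
        public static void install(Context context, NodeId hidden) {
            Predicate<Tuple<NodeId>> filter = tuple -> {
                for (int i = 0; i < tuple.len(); i++) {
                    if (hidden.equals(tuple.get(i)))
                        return false;   // rejected tuples never become bindings
                }
                return true;
            };
            QC2.setFilter(context, filter);
        }
    }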

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryEngineTDB.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryEngineTDB.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryEngineTDB.java
deleted file mode 100644
index 60a6868..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryEngineTDB.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import org.apache.jena.atlas.lib.Lib ;
-import org.apache.jena.query.Query ;
-import org.apache.jena.sparql.algebra.Algebra ;
-import org.apache.jena.sparql.algebra.Op ;
-import org.apache.jena.sparql.core.DatasetDescription ;
-import org.apache.jena.sparql.core.DatasetGraph ;
-import org.apache.jena.sparql.core.DynamicDatasets ;
-import org.apache.jena.sparql.core.Substitute ;
-import org.apache.jena.sparql.engine.Plan ;
-import org.apache.jena.sparql.engine.QueryEngineFactory ;
-import org.apache.jena.sparql.engine.QueryEngineRegistry ;
-import org.apache.jena.sparql.engine.QueryIterator ;
-import org.apache.jena.sparql.engine.binding.Binding ;
-import org.apache.jena.sparql.engine.binding.BindingFactory ;
-import org.apache.jena.sparql.engine.iterator.QueryIteratorWrapper ;
-import org.apache.jena.sparql.engine.main.QueryEngineMain ;
-import org.apache.jena.sparql.mgt.Explain ;
-import org.apache.jena.sparql.util.Context ;
-import org.seaborne.tdb2.TDB2 ;
-import org.seaborne.tdb2.TDBException ;
-import org.seaborne.tdb2.migrate.A2 ;
-import org.seaborne.tdb2.store.DatasetGraphTDB ;
-import org.seaborne.tdb2.sys.TDBInternal ;
-
-// This exists to intercept the query execution setup,
-// e.g. to choose the transformation optimizations,
-// and then to make the quad form.
-// TDB also uses a custom OpExecutor to intercept certain parts
-// of the Op evaluation.
-
-public class QueryEngineTDB extends QueryEngineMain
-{
-    // ---- Wiring
-    static public QueryEngineFactory getFactory() { return factory ; } 
-    static public void register()       { QueryEngineRegistry.addFactory(factory) ; }
-    static public void unregister()     { QueryEngineRegistry.removeFactory(factory) ; }
-    
-    private Binding initialInput ;
-
-    // ---- Object
-    protected QueryEngineTDB(Op op, DatasetGraphTDB dataset, Binding input, Context context)
-    {
-        super(op, dataset, input, context) ;
-        this.initialInput = input ;
-    }
-    
-    private boolean doingDynamicDatasetBySpecialDataset = false ;
-    
-    protected QueryEngineTDB(Query query, DatasetGraphTDB dataset, Binding input, Context cxt)
-    { 
-        super(query, dataset, input, cxt) ; 
-        DatasetDescription dsDesc = DatasetDescription.create(query, context) ;
-        
-        if ( dsDesc != null )
-        {
-            doingDynamicDatasetBySpecialDataset = true ;
-            super.dataset = DynamicDatasets.dynamicDataset(dsDesc, dataset, cxt.isTrue(TDB2.symUnionDefaultGraph) ) ;
-        }
-        this.initialInput = input ; 
-    }
-    
-    // Choose the algebra-level optimizations to invoke. 
-    @Override
-    protected Op modifyOp(Op op)
-    {
-        op = Substitute.substitute(op, initialInput) ;
-        // Optimize (high-level)
-        op = super.modifyOp(op) ;
-
-        // Quadification
-        // Only apply if not a rewritten DynamicDataset
-        if ( ! doingDynamicDatasetBySpecialDataset )
-            op = Algebra.toQuadForm(op) ;
-        
-        // Record it.
-        setOp(op) ;
-        return op ;
-    }
-
-    @Override
-    public QueryIterator eval(Op op, DatasetGraph dsg, Binding input, Context context)
-    {
-        // Top of execution of a query.
-        // Op is quad'ed by now but there still may be some (graph ....) forms e.g. paths
-        
-        // Fix DatasetGraph for global union.
-        if ( context.isTrue(TDB2.symUnionDefaultGraph) && ! doingDynamicDatasetBySpecialDataset ) 
-        {
-            op = A2.unionDefaultGraphQuads(op) ;
-            Explain.explain("REWRITE(Union default graph)", op, context) ;
-        }
-        QueryIterator results = super.eval(op, dsg, input, context) ;
-        results = new QueryIteratorMaterializeBinding(results) ;
-        return results ; 
-    }
-    
-    /** Copy from any TDB internal BindingTDB to a Binding that
-     *  does not have any connection to the database.   
-     */
-    static class QueryIteratorMaterializeBinding extends QueryIteratorWrapper
-    {
-        public QueryIteratorMaterializeBinding(QueryIterator qIter)
-        {
-            super(qIter) ;
-        }
-        
-        @Override
-        protected Binding moveToNextBinding()
-        { 
-            Binding b = super.moveToNextBinding() ;
-            b = BindingFactory.materialize(b) ;
-            return b ;
-        }
-    }
-    
-    // Execution time (needs wiring to ARQ).
-    public long getMillis() { return -1 ; }
-    
-    // ---- Factory
-    protected static QueryEngineFactory factory = new QueryEngineFactoryTDB() ;
-        
-    protected static class QueryEngineFactoryTDB implements QueryEngineFactory
-    {
-        private static boolean isHandledByTDB(DatasetGraph dataset) {
-            return TDBInternal.isBackedByTDB(dataset);
-        }
-        
-        protected DatasetGraphTDB dsgToQuery(DatasetGraph dataset) {
-            try { 
-                return TDBInternal.requireStorage(dataset);
-            } catch (TDBException ex) {
-                // Rethrow with a more specific message.
-                throw new TDBException("Internal inconsistency: trying to execute query on unrecognized kind of DatasetGraph: "+Lib.className(dataset)) ;
-            }
-        }
-        
-        @Override
-        public boolean accept(Query query, DatasetGraph dataset, Context context) 
-        { return isHandledByTDB(dataset) ; }
-
-        @Override
-        public Plan create(Query query, DatasetGraph dataset, Binding input, Context context)
-        {
-            QueryEngineTDB engine = new QueryEngineTDB(query, dsgToQuery(dataset), input, context) ;
-            return engine.getPlan() ;
-        }
-        
-        @Override
-        public boolean accept(Op op, DatasetGraph dataset, Context context) 
-        { return isHandledByTDB(dataset) ; }
-
-        @Override
-        public Plan create(Op op, DatasetGraph dataset, Binding binding, Context context)
-        {
-            QueryEngineTDB engine = new QueryEngineTDB(op, dsgToQuery(dataset), binding, context) ;
-            return engine.getPlan() ;
-        }
-    }
-    
-//    // By rewrite, not using a general purpose dataset with the right graphs in.
-//    private static Op dynamicDatasetOp(Op op,  Context context)
-//    {
-//        Transform transform = null ;
-//    
-//        try {
-//            @SuppressWarnings("unchecked")
-//            Set<Node> defaultGraphs = (Set<Node>)(context.get(SystemTDB.symDatasetDefaultGraphs)) ;
-//            @SuppressWarnings("unchecked")
-//            Set<Node> namedGraphs = (Set<Node>)(context.get(SystemTDB.symDatasetNamedGraphs)) ;
-//            if ( defaultGraphs != null || namedGraphs != null )
-//                transform = new TransformDynamicDataset(defaultGraphs, 
-//                                                        namedGraphs, 
-//                                                        context.isTrue(TDB.symUnionDefaultGraph)) ;
-//        } catch (ClassCastException ex)
-//        {
-//            Log.warn(QueryEngineTDB.class, "Bad dynamic dataset description (ClassCastException)", ex) ;
-//            transform = null ;
-//            return op ;
-//        }
-//
-//        // Apply dynamic dataset modifications.
-//        if ( transform != null )
-//            op = Transformer.transform(transform, op) ;
-//        return op ;
-//    }        
-//    
-}
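
QueryEngineTDB is selected through ARQ's engine registry: register() adds QueryEngineFactoryTDB, whose accept(...) answers true only for TDB-backed datasets. A minimal wiring sketch (illustration only, pre-rename package names); symUnionDefaultGraph is the context symbol this engine's eval(...) rewrites for:

    import org.apache.jena.query.ARQ;
    import org.seaborne.tdb2.TDB2;
    import org.seaborne.tdb2.solver.QueryEngineTDB;

    public class EngineWiring {
        public static void main(String[] args) {
            // Make the TDB2 engine available to ARQ's query engine selection.
            QueryEngineTDB.register();

            // Optional: request union-of-named-graphs as the default graph;
            // QueryEngineTDB.eval(...) rewrites the algebra when this is set.
            ARQ.getContext().setTrue(TDB2.symUnionDefaultGraph);
        }
    }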

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryIterTDB.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryIterTDB.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryIterTDB.java
deleted file mode 100644
index c0bb49a..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/QueryIterTDB.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.Iterator ;
-import java.util.List ;
-
-import org.apache.jena.sparql.engine.ExecutionContext ;
-import org.apache.jena.sparql.engine.QueryIterator ;
-import org.apache.jena.sparql.engine.binding.Binding ;
-import org.apache.jena.sparql.engine.iterator.QueryIterPlainWrapper ;
-
-public class QueryIterTDB extends QueryIterPlainWrapper
-{
-    final private QueryIterator originalInput ;
-    private List<Abortable> killList ;
-    
-    // The original input needs closing as well.
-    public QueryIterTDB(Iterator<Binding> iterBinding, List<Abortable> killList , QueryIterator originalInput, ExecutionContext execCxt)
-    {
-        super(iterBinding, execCxt) ;
-        this.originalInput = originalInput ;
-        this.killList = killList ;
-    }
-    
-    @Override
-    protected void closeIterator()
-    { 
-        if ( originalInput != null )
-            originalInput.close();
-        super.closeIterator() ;
-    }
-
-    @Override
-    protected void requestCancel()
-    { 
-        if ( killList != null )
-            for ( Abortable it : killList )
-                it.abort() ;
-        if ( originalInput != null )
-            originalInput.cancel(); 
-    }
-}

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/SolverLib.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/SolverLib.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/SolverLib.java
deleted file mode 100644
index 0812832..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/SolverLib.java
+++ /dev/null
@@ -1,341 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.* ;
-import java.util.function.Function ;
-import java.util.function.Predicate ;
-
-import org.apache.jena.atlas.iterator.Iter ;
-import org.apache.jena.atlas.iterator.IteratorWrapper ;
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.atlas.lib.tuple.TupleFactory ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.query.QueryCancelledException ;
-import org.apache.jena.sparql.core.BasicPattern ;
-import org.apache.jena.sparql.core.Quad ;
-import org.apache.jena.sparql.core.Var ;
-import org.apache.jena.sparql.engine.ExecutionContext ;
-import org.apache.jena.sparql.engine.QueryIterator ;
-import org.apache.jena.sparql.engine.binding.Binding ;
-import org.apache.jena.sparql.engine.binding.BindingFactory ;
-import org.apache.jena.sparql.engine.binding.BindingMap ;
-import org.apache.jena.sparql.engine.iterator.QueryIterNullIterator ;
-import org.seaborne.tdb2.TDBException ;
-import org.seaborne.tdb2.lib.NodeLib ;
-import org.seaborne.tdb2.store.DatasetGraphTDB ;
-import org.seaborne.tdb2.store.GraphTDB ;
-import org.seaborne.tdb2.store.NodeId ;
-import org.seaborne.tdb2.store.nodetable.NodeTable ;
-import org.seaborne.tdb2.store.nodetupletable.NodeTupleTable ;
-import org.seaborne.tdb2.sys.TDBInternal ;
-import org.slf4j.Logger ;
-import org.slf4j.LoggerFactory ;
-
-/** Utilities used within the TDB BGP solver : local TDB store */
-public class SolverLib
-{
-    private static Logger log = LoggerFactory.getLogger(SolverLib.class) ; 
-    
-    /** Non-reordering execution of a basic graph pattern, given an iterator of bindings as input */
-    public static QueryIterator execute(GraphTDB graph, BasicPattern pattern, 
-                                        QueryIterator input, Predicate<Tuple<NodeId>> filter,
-                                        ExecutionContext execCxt)
-    {
-        // Maybe default graph or named graph.
-        NodeTupleTable ntt = graph.getNodeTupleTable() ;
-        return execute(ntt, graph.getGraphName(), pattern, input, filter, execCxt) ;
-    }
-    
-    /** Non-reordering execution of a quad pattern, given an iterator of bindings as input.
-     *  GraphNode is Node.ANY for execution over the union of named graphs.
-     *  GraphNode is null for execution over the real default graph.
-     */ 
-    public static QueryIterator execute(DatasetGraphTDB ds, Node graphNode, BasicPattern pattern,
-                                        QueryIterator input, Predicate<Tuple<NodeId>> filter,
-                                        ExecutionContext execCxt)
-    {
-        NodeTupleTable ntt = ds.chooseNodeTupleTable(graphNode) ;
-        return execute(ntt, graphNode, pattern, input, filter, execCxt) ;
-    }
-    
-    public static Iterator<BindingNodeId> convertToIds(Iterator<Binding> iterBindings, NodeTable nodeTable)
-    { return Iter.map(iterBindings, convFromBinding(nodeTable)) ; }
-    
-    /** Convert from Iterator<BindingNodeId> to Iterator<Binding>, conversion "on demand"
-     * (in convToBinding(BindingNodeId, NodeTable)).
-     */
-    public static Iterator<Binding> convertToNodes(Iterator<BindingNodeId> iterBindingIds, NodeTable nodeTable)
-    { return Iter.map(iterBindingIds, bindingNodeIds -> convToBinding(bindingNodeIds, nodeTable)) ; }
-    
-    // The worker.  Callers choose the NodeTupleTable.  
-    //     graphNode may be Node.ANY, meaning we should make triples unique.
-    //     graphNode may be null, meaning default graph
-
-    private static QueryIterator execute(NodeTupleTable nodeTupleTable, Node graphNode, BasicPattern pattern, 
-                                         QueryIterator input, Predicate<Tuple<NodeId>> filter,
-                                         ExecutionContext execCxt)
-    {
-        if ( Quad.isUnionGraph(graphNode) )
-            graphNode = Node.ANY ;
-        if ( Quad.isDefaultGraph(graphNode) )
-            graphNode = null ;
-        
-        List<Triple> triples = pattern.getList() ;
-        boolean anyGraph = (graphNode==null ? false : (Node.ANY.equals(graphNode))) ;
-        
-        int tupleLen = nodeTupleTable.getTupleTable().getTupleLen() ;
-        if ( graphNode == null ) {
-            if ( 3 != tupleLen )
-                throw new TDBException("SolverLib: Null graph node but tuples are of length "+tupleLen) ;
-        } else {
-            if ( 4 != tupleLen )
-                throw new TDBException("SolverLib: Graph node specified but tuples are of length "+tupleLen) ;
-        }
-        
-        // Convert from a QueryIterator (Bindings of Var/Node) to BindingNodeId
-        NodeTable nodeTable = nodeTupleTable.getNodeTable() ;
-        
-        Iterator<BindingNodeId> chain = Iter.map(input, SolverLib.convFromBinding(nodeTable)) ;
-        List<Abortable> killList = new ArrayList<>() ;
-        
-        for ( Triple triple : triples )
-        {
-            Tuple<Node> tuple = null ;
-            if ( graphNode == null )
-                // 3-tuples
-                tuple = TupleFactory.tuple(triple.getSubject(), triple.getPredicate(), triple.getObject()) ;
-            else
-                // 4-tuples.
-                tuple = TupleFactory.tuple(graphNode, triple.getSubject(), triple.getPredicate(), triple.getObject()) ;
-            chain = solve(nodeTupleTable, tuple, anyGraph, chain, filter, execCxt) ;
-            chain = makeAbortable(chain, killList) ; 
-        }
-        
-        // DEBUG POINT
-        if ( false )
-        {
-            if ( chain.hasNext())
-                chain = Iter.debug(chain) ;
-            else
-                System.out.println("No results") ;
-        }
-        
-        // Timeout wrapper ****
-        // QueryIterTDB gets called async.
-        // Iter.abortable?
-        // Or each iterator has a place to test.
-        // or pass in a thing to test?
-        
-        
-        // Need to make sure the bindings here point to parent.
-        Iterator<Binding> iterBinding = convertToNodes(chain, nodeTable) ;
-        
-        // "input" will be closed by QueryIterTDB but is otherwise unused.
-        // "killList" will be aborted on timeout.
-        return new QueryIterTDB(iterBinding, killList, input, execCxt) ;
-    }
-    
-    /** Create an abortable iterator, storing it in the killList.
-     *  Just return the input iterator if killList is null.
-     */
-    static <T> Iterator<T> makeAbortable(Iterator<T> iter, List<Abortable> killList)
-    {
-        if ( killList == null )
-            return iter ;
-        IterAbortable<T> k = new IterAbortable<>(iter) ;
-        killList.add(k) ;
-        return k ;
-    }
-    
-    /** Iterator that adds an abort operation which can be called
-     *  at any time, including from another thread, and causes the
-     *  iterator to throw an exception when next touched (hasNext, next).  
-     */
-    static class IterAbortable<T> extends IteratorWrapper<T> implements Abortable
-    {
-        volatile boolean abortFlag = false ;
-        
-        public IterAbortable(Iterator<T> iterator)
-        {
-            super(iterator) ;
-        }
-        
-        /** Can be called asynchronously at any time */
-        @Override
-        public void abort() { 
-            abortFlag = true ;
-        }
-        
-        @Override
-        public boolean hasNext()
-        {
-            if ( abortFlag )
-                throw new QueryCancelledException() ;
-            return iterator.hasNext() ; 
-        }
-        
-        @Override
-        public T next()
-        {
-            if ( abortFlag )
-                throw new QueryCancelledException() ;
-            return iterator.next() ; 
-        }
-    }
-    
-    public static Iterator<BindingNodeId> solve(NodeTupleTable nodeTupleTable, 
-                                                Tuple<Node> tuple,
-                                                boolean anyGraph,
-                                                Iterator<BindingNodeId> chain, Predicate<Tuple<NodeId>> filter,
-                                                ExecutionContext execCxt)
-    {
-        return new StageMatchTuple(nodeTupleTable, chain, tuple, anyGraph, filter, execCxt) ;
-    }
-
-    public static Binding convToBinding(BindingNodeId bindingNodeIds, NodeTable nodeTable) {
-        if ( true )
-            return new BindingTDB(bindingNodeIds, nodeTable) ;
-        else {
-            // Makes nodes immediately, causing unnecessary NodeTable accesses
-            // (e.g. in project).
-            BindingMap b = BindingFactory.create() ;
-            for (Var v : bindingNodeIds) {
-                NodeId id = bindingNodeIds.get(v) ;
-                Node n = nodeTable.getNodeForNodeId(id) ;
-                b.add(v, n) ;
-            }
-            return b ;
-        }
-    }
-    
-    // Transform : Binding ==> BindingNodeId
-    public static Function<Binding, BindingNodeId> convFromBinding(final NodeTable nodeTable)
-    {
-        return binding -> SolverLib.convert(binding, nodeTable);
-    }
-    
-    /** Binding ==> BindingNodeId, given a NodeTable */
-    public static BindingNodeId convert(Binding binding, NodeTable nodeTable) 
-    {
-        if ( binding instanceof BindingTDB )
-            return ((BindingTDB)binding).getBindingId() ;
-
-        BindingNodeId b = new BindingNodeId(binding) ;
-        // and copy over, getting NodeIds.
-        Iterator<Var> vars = binding.vars() ;
-
-        for ( ; vars.hasNext() ; )
-        {
-            Var v = vars.next() ;
-            Node n = binding.get(v) ;  
-            if ( n == null )
-                // Variable mentioned in the binding but not actually defined.
-                // Can occur with BindingProject
-                continue ;
-
-            // Rely on the node table cache for efficiency - we will likely be
-            // repeatedly looking up the same node in different bindings.
-            NodeId id = nodeTable.getNodeIdForNode(n) ;
-            // Even put in "does not exist" for a node now known not to be in the DB.
-            b.put(v, id) ;
-        }
-        return b ;
-    }
-    
-    /** Find whether a specific graph name is in the quads table. */
-    public static QueryIterator testForGraphName(DatasetGraphTDB ds, Node graphNode, QueryIterator input,
-                                                 Predicate<Tuple<NodeId>> filter, ExecutionContext execCxt) {
-        NodeId nid = TDBInternal.getNodeId(ds, graphNode) ;
-        boolean exists = !NodeId.isDoesNotExist(nid) ;
-        if ( exists ) {
-            // Node exists but is it used in the quad position?
-            NodeTupleTable ntt = ds.getQuadTable().getNodeTupleTable() ;
-            // Don't worry about abortable - this iterator should be fast
-            // (with normal indexing - at least one G???).
-            // Either it finds a starting point, or it doesn't.  We are only 
-            // interested in the first .hasNext.
-            Iterator<Tuple<NodeId>> iter1 = ntt.find(nid, NodeId.NodeIdAny, NodeId.NodeIdAny, NodeId.NodeIdAny) ;
-            if ( filter != null )
-                iter1 = Iter.filter(iter1, filter) ;
-            exists = iter1.hasNext() ;
-        }
-
-        if ( exists )
-            return input ;
-        else {
-            input.close() ;
-            return QueryIterNullIterator.create(execCxt) ;
-        }
-    }
-
-    /** Find all the graph names in the quads table. */
-    public static QueryIterator graphNames(DatasetGraphTDB ds, Node graphNode, QueryIterator input,
-                                           Predicate<Tuple<NodeId>> filter, ExecutionContext execCxt) {
-        List<Abortable> killList = new ArrayList<>() ;
-        Iterator<Tuple<NodeId>> iter1 = ds.getQuadTable().getNodeTupleTable().find(NodeId.NodeIdAny, NodeId.NodeIdAny,
-                                                                                   NodeId.NodeIdAny, NodeId.NodeIdAny) ;
-        if ( filter != null )
-            iter1 = Iter.filter(iter1, filter) ;
-
-        Iterator<NodeId> iter2 = Iter.map(iter1, t -> t.get(0)) ;
-        // Project is cheap - don't bother wrapping iter1
-        iter2 = makeAbortable(iter2, killList) ;
-
-        Iterator<NodeId> iter3 = Iter.distinct(iter2) ;
-        iter3 = makeAbortable(iter3, killList) ;
-
-        Iterator<Node> iter4 = NodeLib.nodes(ds.getQuadTable().getNodeTupleTable().getNodeTable(), iter3) ;
-
-        final Var var = Var.alloc(graphNode) ;
-        Iterator<Binding> iterBinding = Iter.map(iter4, node -> BindingFactory.binding(var, node)) ;
-        // Not abortable.
-        return new QueryIterTDB(iterBinding, killList, input, execCxt) ;
-    }
-
-    public static Set<NodeId> convertToNodeIds(Collection<Node> nodes, DatasetGraphTDB dataset)
-    {
-        Set<NodeId> graphIds = new HashSet<>() ;
-        NodeTable nt = dataset.getQuadTable().getNodeTupleTable().getNodeTable() ;
-        for ( Node n : nodes )
-            graphIds.add(nt.getNodeIdForNode(n)) ;
-        return graphIds ;
-    }
-
-    public static Iterator<Tuple<NodeId>> unionGraph(NodeTupleTable ntt)
-    {
-        Iterator<Tuple<NodeId>> iter = ntt.find((NodeId)null, null, null, null) ;
-        iter = Iter.map(iter, quadsToAnyTriples) ;
-        //iterMatches = Iter.distinct(iterMatches) ;
-        
-        // This depends on the way indexes are chosen and
-        // the indexing pattern. It assumes that the index
-        // chosen ends in G so the same triples are adjacent
-        // in a union query.
-        // See TupleTable.scanAllIndex, which ensures this.
-        iter = Iter.distinctAdjacent(iter) ;
-        return iter ;
-    }
-    
-    private static Function<Tuple<NodeId>, Tuple<NodeId>> quadsToAnyTriples = item -> {
-        return TupleFactory.create4(NodeId.NodeIdAny, item.get(1), item.get(2), item.get(3) ) ;
-    } ;
-
-}
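
The cancellation machinery in SolverLib (makeAbortable, IterAbortable) together with QueryIterTDB.requestCancel amounts to: wrap every stage of the iterator chain, remember the wrappers in a kill list, and on cancel set a volatile flag so the next hasNext/next throws. A standalone sketch of that pattern using plain JDK types rather than the Jena classes:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class KillListSketch {
        interface Abortable { void abort(); }

        static class AbortableIter<T> implements Iterator<T>, Abortable {
            private final Iterator<T> base;
            private volatile boolean aborted = false;
            AbortableIter(Iterator<T> base) { this.base = base; }
            @Override public void abort() { aborted = true; }   // callable from any thread
            private void check() { if (aborted) throw new IllegalStateException("cancelled"); }
            @Override public boolean hasNext() { check(); return base.hasNext(); }
            @Override public T next()          { check(); return base.next(); }
        }

        /** Wrap a stage and remember it so a later cancel reaches it. */
        static <T> Iterator<T> makeAbortable(Iterator<T> iter, List<Abortable> killList) {
            AbortableIter<T> a = new AbortableIter<>(iter);
            killList.add(a);
            return a;
        }

        /** Fan the cancellation out to every wrapped stage (cf. requestCancel). */
        static void cancel(List<Abortable> killList) {
            for (Abortable a : killList)
                a.abort();
        }

        public static void main(String[] args) {
            List<Abortable> killList = new ArrayList<>();
            Iterator<Integer> it = makeAbortable(Arrays.asList(1, 2, 3).iterator(), killList);
            System.out.println(it.next());   // 1
            cancel(killList);                // any further hasNext/next throws
        }
    }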

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageGeneratorDirectTDB.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageGeneratorDirectTDB.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageGeneratorDirectTDB.java
deleted file mode 100644
index 16bf5d5..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageGeneratorDirectTDB.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.function.Predicate;
-
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.graph.Graph ;
-import org.apache.jena.sparql.core.BasicPattern ;
-import org.apache.jena.sparql.engine.ExecutionContext ;
-import org.apache.jena.sparql.engine.QueryIterator ;
-import org.apache.jena.sparql.engine.main.StageGenerator ;
-import org.seaborne.tdb2.store.GraphTDB ;
-import org.seaborne.tdb2.store.NodeId ;
-
-/** Execute TDB requests directly -- no reordering
- *  Using OpExecutor is preferred.
- */ 
-public class StageGeneratorDirectTDB implements StageGenerator
-{
-    // Using OpExecutor is preferred.
-    StageGenerator above = null ;
-    
-    public StageGeneratorDirectTDB(StageGenerator original)
-    {
-        above = original ;
-    }
-    
-    @Override
-    public QueryIterator execute(BasicPattern pattern, QueryIterator input, ExecutionContext execCxt)
-    {
-        // --- In case this isn't for TDB
-        Graph g = execCxt.getActiveGraph() ;
-        
-        if ( ! ( g instanceof GraphTDB ) )
-            // Not us - bounce up the StageGenerator chain
-            return above.execute(pattern, input, execCxt) ;
-        GraphTDB graph = (GraphTDB)g ;
-        Predicate<Tuple<NodeId>> filter = QC2.getFilter(execCxt.getContext()) ;
-        return SolverLib.execute(graph, pattern, input, filter, execCxt) ;
-    }
-}
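
StageGeneratorDirectTDB is chained in front of whatever StageGenerator is already installed and bounces non-TDB graphs to it ("above"). A sketch of the usual wiring, following the pattern TDB1 uses in its own setup code (illustration only, pre-rename package names):

    import org.apache.jena.query.ARQ;
    import org.apache.jena.sparql.engine.main.StageBuilder;
    import org.apache.jena.sparql.engine.main.StageGenerator;
    import org.seaborne.tdb2.solver.StageGeneratorDirectTDB;

    public class WireStageGenerator {
        public static void main(String[] args) {
            // Keep the currently installed generator as the fallback...
            StageGenerator previous = (StageGenerator) ARQ.getContext().get(ARQ.stageGenerator);
            // ...and put the TDB one in front of it.
            StageBuilder.setGenerator(ARQ.getContext(), new StageGeneratorDirectTDB(previous));
        }
    }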

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageMatchTuple.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageMatchTuple.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageMatchTuple.java
deleted file mode 100644
index 85e634b..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/StageMatchTuple.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver;
-
-import java.util.Iterator ;
-import java.util.List ;
-import java.util.function.Function ;
-import java.util.function.Predicate ;
-
-import org.apache.jena.atlas.iterator.Iter ;
-import org.apache.jena.atlas.iterator.RepeatApplyIterator ;
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.atlas.lib.tuple.TupleFactory ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.sparql.core.Var ;
-import org.apache.jena.sparql.engine.ExecutionContext ;
-import org.seaborne.tdb2.store.NodeId ;
-import org.seaborne.tdb2.store.nodetable.NodeTable ;
-import org.seaborne.tdb2.store.nodetupletable.NodeTupleTable ;
-
-public class StageMatchTuple extends RepeatApplyIterator<BindingNodeId>
-{
-    private final NodeTupleTable nodeTupleTable ;
-    private final Tuple<Node> patternTuple ;
-
-    private final ExecutionContext execCxt ;
-    private boolean anyGraphs ;
-    private Predicate<Tuple<NodeId>> filter ;
-
-    public StageMatchTuple(NodeTupleTable nodeTupleTable, Iterator<BindingNodeId> input, 
-                            Tuple<Node> tuple, boolean anyGraphs, 
-                            Predicate<Tuple<NodeId>> filter, 
-                            ExecutionContext execCxt)
-    {
-        super(input) ;
-        this.filter = filter ;
-        this.nodeTupleTable = nodeTupleTable ; 
-        this.patternTuple = tuple ;
-        this.execCxt = execCxt ;
-        this.anyGraphs = anyGraphs ; 
-    }
-
-    /** Prepare a pattern (tuple of nodes) and an existing binding of NodeIds into NodeIds and Variables.
-     *  A variable in the pattern is replaced by its binding, or null, in the NodeIds.
-     *  A variable that is not bound by the binding is placed in the var array.
-     *  Return false if preparation detects that the pattern cannot match.
-     */
-    public static boolean prepare(NodeTable nodeTable, Tuple<Node> patternTuple, BindingNodeId input, NodeId ids[], Var[] var)
-    {
-        // Process the Node to NodeId conversion ourselves because
-        // we wish to abort if an unknown node is seen.
-        for ( int i = 0 ; i < patternTuple.len() ; i++ )
-        {
-            Node n = patternTuple.get(i) ;
-            // Substitution and turning into NodeIds
-            // Variables unsubstituted are null NodeIds
-            NodeId nId = idFor(nodeTable, input, n) ;
-            if ( NodeId.isDoesNotExist(nId) )
-                return false;
-            ids[i] = nId ;
-            if ( nId == null )
-                var[i] = asVar(n) ;
-        }
-        return true ;
-    }
-    
-    @Override
-    protected Iterator<BindingNodeId> makeNextStage(final BindingNodeId input)
-    {
-        // ---- Convert to NodeIds 
-        NodeId ids[] = new NodeId[patternTuple.len()] ;
-        // Variables for this tuple after substitution
-        final Var[] var = new Var[patternTuple.len()] ;
-
-        boolean b = prepare(nodeTupleTable.getNodeTable(), patternTuple, input, ids, var) ;
-        if ( !b )
-            // Short cut - known unknown NodeId
-            return Iter.nullIterator(); 
-        
-        Iterator<Tuple<NodeId>> iterMatches = nodeTupleTable.find(TupleFactory.create(ids)) ;  
-        
-        // ** Allow a triple or quad filter here.
-        if ( filter != null )
-            iterMatches = Iter.filter(iterMatches, filter) ;
-        
-        // If we want to reduce to RDF semantics over quads,
-        // we need to reduce the quads to unique triples. 
-        // We do that by having the graph slot as "any", then running
-        // through a distinct-ifier. 
-        // Assumes quads are GSPO - zaps the first slot.
-        // Assumes that tuples are not shared.
-        if ( anyGraphs )
-        {
-            iterMatches = Iter.map(iterMatches, quadsToAnyTriples) ;
-            //Guaranteed 
-            //iterMatches = Iter.distinct(iterMatches) ;
-            
-            // This depends on the way indexes are chosen and
-            // the indexing pattern. It assumes that the index 
-            // chosen ends in G so same triples are adjacent 
-            // in a union query.
-            // 
-            // If any slot is defined, then the index will be X??G.
-            // If no slot is defined, then the index will be ???G.
-            // The TupleTable ensures the latter:
-            //  see TupleTable.scanAllIndex.
-            //  No G part way through.
-            iterMatches = Iter.distinctAdjacent(iterMatches) ;
-        }
-        
-        // Map Tuple<NodeId> to BindingNodeId
-        Function<Tuple<NodeId>, BindingNodeId> binder = tuple -> 
-            {
-                BindingNodeId output = new BindingNodeId(input) ;
-                for ( int i = 0 ; i < var.length ; i++ )
-                {
-                    Var v = var[i] ;
-                    if ( v == null )
-                        continue ;
-                    NodeId id = tuple.get(i) ;
-                    if ( reject(output, v, id) )
-                        return null ;
-                    output.put(v, id) ;
-                }
-                return output ;
-        } ;
-        
-        return Iter.iter(iterMatches).map(binder).removeNulls() ;
-    }
-    
-    private static Iterator<Tuple<NodeId>> print(Iterator<Tuple<NodeId>> iter)
-    {
-        if ( ! iter.hasNext() )
-            System.err.println("<empty>") ;
-        else
-        {
-            List<Tuple<NodeId>> r = Iter.toList(iter) ;
-            String str = Iter.asString(r, "\n") ;
-            System.err.println(str) ;
-            // Reset iter
-            iter = Iter.iter(r) ;
-        }
-        return iter ;
-    }
-    
-    private static boolean reject(BindingNodeId output , Var var, NodeId value)
-    {
-        if ( ! output.containsKey(var) )
-            return false ;
-        
-        if ( output.get(var).equals(value) )
-            return false ;
-
-        return true ;
-    }
-    
-    private static Var asVar(Node node)
-    {
-        if ( Var.isVar(node) )
-            return Var.alloc(node) ;
-        return null ;
-    }
-
-    /** For a variable, return its binding's NodeId or null if unbound; for a concrete node, return its NodeId or NodeDoesNotExist */
-    private static NodeId idFor(NodeTable nodeTable, BindingNodeId input, Node node)
-    {
-        if ( Var.isVar(node) )
-        {
-            NodeId n = input.get((Var.alloc(node))) ;
-            // Bound to NodeId or null. 
-            return n ;
-        } 
-        // May return NodeId.NodeDoesNotExist, which is never null. 
-        return nodeTable.getNodeIdForNode(node) ;
-    }
-    
-    private static Function<Tuple<NodeId>, Tuple<NodeId>> quadsToAnyTriples = item -> {
-        return TupleFactory.create4(NodeId.NodeIdAny, item.get(1), item.get(2), item.get(3) ) ;
-    } ;
-}
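
The union-graph path above maps each GSPO quad to a tuple whose graph slot is ANY and then drops adjacent duplicates, which is only correct because the chosen index ends in G, so identical triples arrive next to each other in the scan. A minimal, self-contained sketch of that reduction (plain Java, with string tuples standing in for NodeId tuples; the names are illustrative, not the TDB API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    /** Illustrative only: reduce GSPO quads to distinct triples, assuming the input is
     *  ordered so that equal (S,P,O) rows are adjacent (as when the scan index ends in G). */
    public class UnionGraphSketch {
        public static void main(String[] args) {
            List<String[]> quads = Arrays.asList(
                new String[]{"g1", "s", "p", "o1"},
                new String[]{"g2", "s", "p", "o1"},   // same triple, different graph
                new String[]{"g1", "s", "p", "o2"});
            List<String[]> triples = new ArrayList<>();
            String[] prev = null;
            for ( String[] q : quads ) {
                // "Zap" the graph slot - the union graph ignores it.
                String[] t = new String[]{"ANY", q[1], q[2], q[3]};
                // Adjacent-distinct: drop a row equal to the previous one.
                if ( prev != null && Arrays.equals(prev, t) )
                    continue;
                triples.add(t);
                prev = t;
            }
            triples.forEach(row -> System.out.println(String.join(" ", row)));
        }
    }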

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/Stats.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/Stats.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/Stats.java
deleted file mode 100644
index 42afbac..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/Stats.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver.stats;
-
-import static org.apache.jena.sparql.sse.Item.addPair ;
-import static org.apache.jena.sparql.sse.Item.createTagged ;
-
-import java.io.BufferedOutputStream ;
-import java.io.FileOutputStream ;
-import java.io.IOException ;
-import java.io.OutputStream ;
-import java.util.Iterator ;
-import java.util.Map ;
-import java.util.Map.Entry ;
-
-import org.apache.jena.atlas.lib.DateTimeUtils ;
-import org.apache.jena.atlas.logging.Log ;
-import org.apache.jena.graph.Graph ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.sparql.engine.optimizer.StatsMatcher ;
-import org.apache.jena.sparql.graph.NodeConst ;
-import org.apache.jena.sparql.sse.Item ;
-import org.apache.jena.sparql.sse.ItemList ;
-import org.apache.jena.sparql.sse.ItemWriter ;
-import org.apache.jena.sparql.util.NodeFactoryExtra ;
-
-public class Stats
-{
-    static Item ZERO = Item.createNode(NodeFactoryExtra.intToNode(0)) ;
-
-    /** Write statistics */
-    static public void write(String filename, StatsResults stats)
-    {
-        write(filename, stats.getPredicates(), stats.getTypes(), stats.getCount()) ;
-    }
-    
-    /** Write statistics */
-    static public void write(OutputStream output, StatsResults stats)
-    {
-        write(output, stats.getPredicates(), stats.getTypes(), stats.getCount()) ;
-    }
-    
-    static private void write(String filename, Map<Node, Integer> predicateStats, Map<Node, Integer> typeStats, long statsTotal)
-    {
-        // Write out the stats
-        try (OutputStream statsOut = new BufferedOutputStream(new FileOutputStream(filename))) {
-            write(statsOut, predicateStats, typeStats, statsTotal) ;
-        } catch (IOException ex)
-        { Log.warn(Stats.class, "Problem when writing stats file", ex) ; }
-    }
-    
-    static private void write(OutputStream output, Map<Node, Integer> predicateStats, Map<Node, Integer> typeStats, long statsTotal)
-    {
-        Item item = format(predicateStats, typeStats, statsTotal) ;
-        ItemWriter.write(output, item) ;
-    }
-    
-
-    /** Gather statistics, any graph */
-    public static StatsCollector gather(Graph graph)
-    {
-        StatsCollector stats = new StatsCollector() ;
-    
-        Iterator<Triple> iter = graph.find(Node.ANY, Node.ANY, Node.ANY) ;
-        for ( ; iter.hasNext() ; )
-        {
-            Triple t = iter.next();
-            stats.record(null, t.getSubject(), t.getPredicate(), t.getObject()) ;
-        }
-        
-        return stats ;
-    }
-
-    public static Item format(StatsResults stats)
-    {
-        return format(stats.getPredicates(), stats.getTypes(), stats.getCount()) ;
-    }
-    
-    private static Item format(Map<Node, Integer> predicates, Map<Node, Integer> types, long count)
-    {
-        Item stats = Item.createList() ;
-        ItemList statsList = stats.getList() ;
-        statsList.add("stats") ;
-
-        Item meta = createTagged(StatsMatcher.META) ;
-        addPair(meta.getList(), "timestamp", NodeFactoryExtra.nowAsDateTime()) ;
-        addPair(meta.getList(), "run@",  DateTimeUtils.nowAsString()) ;
-        if ( count >= 0 )
-            addPair(meta.getList(), StatsMatcher.COUNT, NodeFactoryExtra.intToNode((int)count)) ;
-        statsList.add(meta) ;
-        
-        for ( Entry<Node, Integer> entry : types.entrySet() )
-        {
-            Node type = entry.getKey() ;
-            addTypeTriple(statsList, type, NodeFactoryExtra.intToNode(entry.getValue()) ) ;
-        }
-        
-        for ( Entry<Node, Integer> entry : predicates.entrySet() )
-        {
-            Node node = entry.getKey() ;
-            // Skip these - they just clog things up!
-            if ( node.getURI().startsWith("http://www.w3.org/1999/02/22-rdf-syntax-ns#_") )
-                continue ;
-            addPair(statsList, node, NodeFactoryExtra.intToNode(entry.getValue())) ;
-        }
-        
-        // Add a default rule.
-        addPair(statsList, StatsMatcher.OTHER, ZERO) ;
-        
-        return stats ;
-    }
-
-    private static void addTypeTriple(ItemList statsList, Node type, Node intCount)
-    {
-        ItemList triple = new ItemList() ;
-        triple.add("VAR") ;
-        triple.add(NodeConst.nodeRDFType) ;
-        triple.add(type) ;
-        addPair(statsList, Item.createList(triple), Item.createNode(intCount)) ;
-    }
-}
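
The stats machinery above is driven in three steps: gather over a graph, snapshot the collector into a StatsResults, and write the SSE stats item. A hedged usage sketch against the API shown in this file (the in-memory graph and the file name "stats.opt" are illustrative assumptions, not part of this code):

    import org.apache.jena.graph.Graph;
    import org.apache.jena.graph.NodeFactory;
    import org.apache.jena.graph.Triple;
    import org.apache.jena.sparql.graph.GraphFactory;
    import org.seaborne.tdb2.solver.stats.Stats;
    import org.seaborne.tdb2.solver.stats.StatsCollector;
    import org.seaborne.tdb2.solver.stats.StatsResults;

    public class StatsSketch {
        public static void main(String[] args) {
            Graph graph = GraphFactory.createDefaultGraph();
            graph.add(Triple.create(NodeFactory.createURI("http://example/s"),
                                    NodeFactory.createURI("http://example/p"),
                                    NodeFactory.createURI("http://example/o")));
            // Count predicate occurrences and rdf:type objects over the whole graph.
            StatsCollector collector = Stats.gather(graph);
            StatsResults results = collector.results();
            // Write the SSE-formatted stats item; the file name here is illustrative.
            Stats.write("stats.opt", results);
        }
    }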

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollector.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollector.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollector.java
deleted file mode 100644
index 43a90b4..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollector.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver.stats;
-
-import java.util.Map ;
-
-import org.apache.jena.graph.Node ;
-import org.apache.jena.sparql.graph.NodeConst ;
-
-/** Statistics collector, general purpose, uses Nodes */
-public class StatsCollector extends StatsCollectorBase<Node>
-{
-    public StatsCollector() { super(NodeConst.nodeRDFType) ; }
-
-    @Override
-    protected Map<Node, Integer> convert(Map<Node, Integer> map)
-    {
-        return map ;
-    }
-}

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorBase.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorBase.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorBase.java
deleted file mode 100644
index 8d01ca5..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorBase.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver.stats;
-
-
-import java.util.HashMap ;
-import java.util.Map ;
-
-import org.apache.jena.graph.Node ;
-
-/** Statistics collector, general purpose */
-abstract class StatsCollectorBase<T>
-{
-    private long count = 0 ;
-    private Map<T, Integer> predicates = new HashMap<>(10000) ;
-    private Map<T, Integer> types = new HashMap<>(10000) ;
-    private T typeTrigger ;
-    
-    protected StatsCollectorBase(T typeTrigger)
-    {
-        this.typeTrigger = typeTrigger ;
-    }
-
-    public void record(T g, T s, T p, T o)
-    {
-        count++ ;
-        predicates.put(p, predicates.getOrDefault(p, 0) + 1);
-        if ( typeTrigger != null && typeTrigger.equals(p) )
-            types.put(o, types.getOrDefault(o, 0) + 1);
-    }
-
-    protected abstract Map<Node, Integer> convert(Map<T, Integer> map) ;
-    
-    public StatsResults results()
-    {
-        return new StatsResults(convert(predicates), convert(types), count) ;
-    }
-}
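
StatsCollectorBase is generic in the term type T so that the same tallying logic can run over Nodes or NodeIds, with convert() mapping the internal counts back to Nodes for output. A hypothetical subclass illustrating that contract (it would have to sit in the same package, since the base class is package-private; the class and trigger URI are purely illustrative):

    package org.seaborne.tdb2.solver.stats;   // required: StatsCollectorBase is package-private

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.jena.graph.Node;
    import org.apache.jena.graph.NodeFactory;

    /** Hypothetical collector keyed by URI strings rather than Nodes or NodeIds. */
    class StatsCollectorByString extends StatsCollectorBase<String> {
        StatsCollectorByString() {
            // The "type trigger" is the predicate whose objects are tallied as types.
            super("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
        }

        @Override
        protected Map<Node, Integer> convert(Map<String, Integer> map) {
            Map<Node, Integer> out = new HashMap<>();
            map.forEach((uri, count) -> out.put(NodeFactory.createURI(uri), count));
            return out;
        }
    }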

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorNodeId.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorNodeId.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorNodeId.java
deleted file mode 100644
index 62705d2..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsCollectorNodeId.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver.stats;
-
-import java.util.HashMap ;
-import java.util.Map ;
-
-import org.apache.jena.graph.Node ;
-import org.apache.jena.sparql.graph.NodeConst ;
-import org.seaborne.tdb2.store.NodeId ;
-import org.seaborne.tdb2.store.nodetable.NodeTable ;
-
-/** Statistics collector, aggregates based on NodeId */
-public class StatsCollectorNodeId extends StatsCollectorBase<NodeId>
-{
-    private NodeTable nodeTable ;
-    
-    public StatsCollectorNodeId(NodeTable nodeTable)
-    {
-        super(findRDFType(nodeTable)) ;
-        this.nodeTable = nodeTable ;
-    }
-    
-    private static NodeId findRDFType(NodeTable nodeTable2)
-    {
-        return nodeTable2.getAllocateNodeId(NodeConst.nodeRDFType) ;
-    }
-
-    @Override
-    protected Map<Node, Integer> convert(Map<NodeId, Integer> stats)
-    {
-        Map<Node, Integer> statsNodes = new HashMap<>(1000) ;
-        for ( NodeId p : stats.keySet() )
-        {
-            Node n = nodeTable.getNodeForNodeId(p) ;
-            statsNodes.put(n, stats.get(p)) ;
-        }
-        return statsNodes ;
-    }
-}

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsResults.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsResults.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsResults.java
deleted file mode 100644
index 2f6f9e8..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/solver/stats/StatsResults.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.solver.stats;
-
-import java.util.Map ;
-
-import org.apache.jena.graph.Node ;
-
-/** Statistics collector */
-public class StatsResults 
-{
-    private final Map<Node, Integer> predicates ;
-    private final Map<Node, Integer> types ;
-    private final long count ;
-
-    StatsResults(Map<Node, Integer> predicates, Map<Node, Integer> types, long count)
-    {
-        this.count = count ;
-        this.predicates = predicates ;
-        this.types = types ;
-    }
-
-    public Map<Node, Integer> getPredicates()
-    {
-        return predicates ;
-    }
-
-    public Map<Node, Integer> getTypes()
-    {
-        return types ;
-    }
-
-    public long getCount()
-    {
-        return count ;
-    }
-}

http://git-wip-us.apache.org/repos/asf/jena/blob/3d456654/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/store/DatasetGraphSwitchable.java
----------------------------------------------------------------------
diff --git a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/store/DatasetGraphSwitchable.java b/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/store/DatasetGraphSwitchable.java
deleted file mode 100644
index e7f4b19..0000000
--- a/jena-db/jena-tdb2/src/main/java/org/seaborne/tdb2/store/DatasetGraphSwitchable.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.seaborne.tdb2.store;
-
-import java.nio.file.Path;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-
-import org.apache.jena.atlas.lib.Cache ;
-import org.apache.jena.atlas.lib.CacheFactory ;
-import org.apache.jena.graph.Graph;
-import org.apache.jena.graph.Node;
-import org.apache.jena.shared.PrefixMapping;
-import org.apache.jena.shared.impl.PrefixMappingImpl;
-import org.apache.jena.sparql.core.DatasetGraph ;
-import org.apache.jena.sparql.core.DatasetPrefixStorage ;
-import org.seaborne.dboe.base.file.Location ;
-
-final
-public class DatasetGraphSwitchable extends DatasetGraphWrapperTxn /* Until ARQ catches up with promote */ 
-{
-    // QueryEngineFactoryWrapper has a QueryEngineFactory that is always loaded and
-    // executes on the unwrapped DSG (recursively). Unwrapping is via getBase, calling
-    // getWrapped() which is implemented with get().
-    
-//    static { 
-//        // QueryEngineRegistry.addFactory(factory());
-//    }
-    
-    private final AtomicReference<DatasetGraph> dsgx = new AtomicReference<>();
-    // Null for in-memory datasets.
-    private final Path basePath;
-    private final Location location ;
-    
-    public DatasetGraphSwitchable(Path base, Location location, DatasetGraph dsg) {
-        // Don't use the slot in datasetGraphWrapper - use the AtomicReference
-        super(null) ;
-        dsgx.set(dsg);
-        this.basePath = base;
-        this.location = location; 
-    }
-
-    /** Is this {@code DatasetGraphSwitchable} just a holder for a {@code DatasetGraph}?
-     *  If so, it does not have a location on disk.
-     */
-    public boolean hasContainerPath() { return basePath != null; } 
-    
-    public Path getContainerPath() { return basePath; }
-    
-    public Location getLocation() { return location; }
-
-    /** The dataset to use for redirection - can be overridden.
-     *  It is also guaranteed that this is called only once per
-     *  delegated call.  Changes to the wrapped object can be
-     *  made based on that contract.
-     */
-    @Override
-    public DatasetGraph get() { return dsgx.get(); }
-
-    /** Set the base {@link DatasetGraph}.
-     * Returns the old value.
-     */ 
-    public DatasetGraph set(DatasetGraph dsg) { 
-        return dsgx.getAndSet(dsg);
-    }
-    
-    /** If and only if the current value is the given old value, set the base {@link DatasetGraph} to the new value.
-     * Returns true if a swap happened.
-     */ 
-    public boolean change(DatasetGraph oldDSG, DatasetGraph newDSG) { 
-        // No need to clear. ngCache.clear();
-        return dsgx.compareAndSet(oldDSG, newDSG);
-    }
-
-    private Graph dftGraph = GraphViewSwitchable.createDefaultGraph(this);
-    
-    @Override
-    public Graph getDefaultGraph() {
-        return dftGraph;
-    }
-    
-//    private Cache<Node, Graph> ngCache = CacheFactory.createCache(10);
-    private Cache<Node, Graph> ngCache = CacheFactory.createOneSlotCache();
-    
-    @Override
-    public Graph getGraph(Node gn) {
-        return ngCache.getOrFill(gn, ()->GraphViewSwitchable.createNamedGraph(this, gn));
-    }
-
-    // TDB2 specific.
-    // Does not cope with blank nodes.
-    // A PrefixMapping sending operations via the switchable.
-    private PrefixMapping prefixMapping(Node graphName) {
-        
-        String gn = (graphName == null) ? "" : graphName.getURI(); 
-        
-        return new PrefixMappingImpl() {
-            
-            DatasetPrefixStorage dps() {
-                return ((DatasetGraphTDB)dsgx.get()).getPrefixes();
-            }
-            
-            Graph graph() {
-                DatasetGraphTDB dsg = (DatasetGraphTDB)dsgx.get();
-                if ( gn == null )
-                    return dsg.getDefaultGraph();
-                else
-                    return dsg.getGraph(graphName);
-            }
-            
-            PrefixMapping prefixMapping() {
-                if ( gn == null )
-                    return dps().getPrefixMapping();
-                else
-                    return dps().getPrefixMapping(gn); 
-            }
-
-            @Override
-            protected void set(String prefix, String uri) {
-                dps().insertPrefix(gn, prefix, uri);
-                super.set(prefix, uri);
-            }
-
-            @Override
-            protected String get(String prefix) {
-                return dps().readPrefix(gn, prefix);
-            }
-
-            @Override
-            protected void remove(String prefix) {
-                dps().getPrefixMapping().removeNsPrefix(prefix);
-                super.remove(prefix);
-            }
-            
-            @Override
-            public Map<String, String> getNsPrefixMap() {
-                return prefixMapping().getNsPrefixMap();
-                //return graph().getPrefixMapping().getNsPrefixMap();
-            }
-        };
-    }
-    
-    //static { register() ; }
-    
-    
-//    static QueryEngineFactory factory() {
-//        return new QueryEngineFactory() {
-//            @Override
-//            public boolean accept(Op op, DatasetGraph dataset, Context context) {
-//                DatasetGraphSwitchable dsg = extract(dataset) ;
-//                if ( dsg == null ) return false;
-//                QueryEngineFactory f = QueryEngineRegistry.findFactory(op, dsg.get(), context);
-//                return f.accept(op, dataset, context);
-//            }
-//
-//            @Override
-//            public Plan create(Op op, DatasetGraph dataset, Binding inputBinding, Context context) {
-//                DatasetGraphSwitchable dsg = extract(dataset) ;
-//                if ( dsg == null ) return null;
-//                QueryEngineFactory f = QueryEngineRegistry.findFactory(op, dsg.get(), context);
-//                return f.create(op, dataset, inputBinding, context);
-//            }
-//
-//            private DatasetGraphSwitchable extract(DatasetGraph dataset) {
-//                if ( dataset instanceof DatasetGraphSwitchable )
-//                    return (DatasetGraphSwitchable)dataset;
-//                return null;
-//            }
-//
-//            @Override
-//            public boolean accept(Query query, DatasetGraph dataset, Context context) {
-//                DatasetGraphSwitchable dsg = extract(dataset) ;
-//                if ( dsg == null ) return false;
-//                QueryEngineFactory f = QueryEngineRegistry.findFactory(query, dsg.get(), context);
-//                return f.accept(query, dataset, context);
-//            }
-//
-//            @Override
-//            public Plan create(Query query, DatasetGraph dataset, Binding inputBinding, Context context) {
-//                DatasetGraphSwitchable dsg = extract(dataset) ;
-//                if ( dsg == null ) return null;
-//                QueryEngineFactory f = QueryEngineRegistry.findFactory(query, dsg.get(), context);
-//                return f.create(query, dataset, inputBinding, context);
-//            }
-//        };
-//    }
-}
-
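
At its core the switchable dataset is an AtomicReference over the live DatasetGraph: get() reads it, set() replaces it unconditionally, and change() swaps it only if the caller's expected value is still current. A standalone sketch of that compare-and-swap pattern (illustrative class, not the TDB2 API):

    import java.util.concurrent.atomic.AtomicReference;

    /** Illustrative holder whose delegate can be switched atomically. */
    public class SwitchableRef<T> {
        private final AtomicReference<T> ref;

        public SwitchableRef(T initial) { this.ref = new AtomicReference<>(initial); }

        /** Current delegate; each delegated call should read this exactly once. */
        public T get() { return ref.get(); }

        /** Unconditionally install a new delegate, returning the old one. */
        public T set(T newValue) { return ref.getAndSet(newValue); }

        /** Install newValue only if oldValue is still current; true if the swap happened. */
        public boolean change(T oldValue, T newValue) {
            return ref.compareAndSet(oldValue, newValue);
        }

        public static void main(String[] args) {
            SwitchableRef<String> db = new SwitchableRef<>("generation-1");
            String current = db.get();
            System.out.println(db.change(current, "generation-2"));        // true: value unchanged
            System.out.println(db.change("generation-1", "generation-3")); // false: stale expectation
            System.out.println(db.get());                                  // generation-2
        }
    }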