Posted to commits@rya.apache.org by ca...@apache.org on 2017/08/25 17:56:22 UTC

incubator-rya git commit: RYA-294 owl:someValuesFrom inference. Closes #217.

Repository: incubator-rya
Updated Branches:
  refs/heads/master 9c12630bb -> fc8d30ac6


RYA-294 owl:someValuesFrom inference. Closes #217.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/fc8d30ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/fc8d30ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/fc8d30ac

Branch: refs/heads/master
Commit: fc8d30ac6ca107face91c8c3f573def60dc7af5d
Parents: 9c12630
Author: Jesse Hatfield <je...@parsons.com>
Authored: Wed Aug 23 21:14:48 2017 -0400
Committer: Caleb Meier <ca...@parsons.com>
Committed: Fri Aug 25 10:55:28 2017 -0700

----------------------------------------------------------------------
 .../api/RdfCloudTripleStoreConfiguration.java   |  20 ++
 .../src/main/java/MongoRyaDirectExample.java    |  54 +++++
 .../RdfCloudTripleStoreConnection.java          |   2 +
 .../inference/InferenceEngine.java              | 237 +++++++++++++++----
 .../inference/SomeValuesFromVisitor.java        | 110 +++++++++
 .../inference/AllValuesFromVisitorTest.java     |   8 +-
 .../inference/InferenceEngineTest.java          |  42 ++++
 .../rdftriplestore/inference/InferenceIT.java   |  68 +++++-
 .../inference/SomeValuesFromVisitorTest.java    | 152 ++++++++++++
 9 files changed, 639 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConfiguration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConfiguration.java b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConfiguration.java
index eeb49b5..6bebb86 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConfiguration.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConfiguration.java
@@ -75,6 +75,7 @@ public abstract class RdfCloudTripleStoreConfiguration extends Configuration {
     public static final String INFER_INCLUDE_INTERSECTION_OF = "infer.include.intersectionof";
     public static final String INFER_INCLUDE_INVERSEOF = "infer.include.inverseof";
     public static final String INFER_INCLUDE_ONE_OF = "infer.include.oneof";
+    public static final String INFER_INCLUDE_SOME_VALUES_FROM = "infer.include.somevaluesfrom";
     public static final String INFER_INCLUDE_SUBCLASSOF = "infer.include.subclassof";
     public static final String INFER_INCLUDE_SUBPROPOF = "infer.include.subpropof";
     public static final String INFER_INCLUDE_SYMMPROP = "infer.include.symmprop";
@@ -378,6 +379,25 @@ public abstract class RdfCloudTripleStoreConfiguration extends Configuration {
         setBoolean(INFER_INCLUDE_ONE_OF, value);
     }
 
+    /**
+     * @return {@code true} if owl:someValuesFrom inferencing is enabled.
+     * {@code false} otherwise. Defaults to {@code true} if nothing is
+     * specified.
+     */
+    public Boolean isInferSomeValuesFrom() {
+        return getBoolean(INFER_INCLUDE_SOME_VALUES_FROM, true);
+    }
+
+    /**
+     * Sets whether owl:someValuesFrom inferencing is enabled or disabled.
+     * @param value {@code true} to enable owl:someValuesFrom inferencing,
+     * {@code false} to disable it.
+     */
+    public void setInferSomeValuesFrom(final Boolean value) {
+        Preconditions.checkNotNull(value);
+        setBoolean(INFER_INCLUDE_SOME_VALUES_FROM, value);
+    }
+
     public Boolean isInferSubClassOf() {
         return getBoolean(INFER_INCLUDE_SUBCLASSOF, true);
     }

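The new flag follows the same pattern as the other inference toggles. A minimal usage sketch (not part of the patch), assuming the existing AccumuloRdfConfiguration subclass:

    final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
    // owl:someValuesFrom inference defaults to enabled when nothing is specified
    boolean enabled = conf.isInferSomeValuesFrom();   // true
    // disable it; the value is stored under infer.include.somevaluesfrom
    conf.setInferSomeValuesFrom(false);
    enabled = conf.isInferSomeValuesFrom();           // false
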
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
index 7f0d308..a7cf2e5 100644
--- a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
@@ -119,6 +119,7 @@ public class MongoRyaDirectExample {
                 testInfer(conn, sail);
                 testPropertyChainInference(conn, sail);
                 testPropertyChainInferenceAltRepresentation(conn, sail);
+                testSomeValuesFromInference(conn, sail);
                 testAllValuesFromInference(conn, sail);
                 testIntersectionOfInference(conn, sail);
                 testOneOfInference(conn, sail);
@@ -519,6 +520,59 @@ public class MongoRyaDirectExample {
         Validate.isTrue(resultHandler.getCount() == 2);
     }
 
+    public static void testSomeValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+    UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+        final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
+        log.info("Adding Data");
+        String insert = "PREFIX lubm: <" + lubm + ">\n"
+                + "INSERT DATA { GRAPH <http://updated/test> {\n"
+                + "  <urn:Department0> a lubm:Department; lubm:subOrganizationOf <urn:University0> .\n"
+                + "  <urn:ResearchGroup0> a lubm:ResearchGroup; lubm:subOrganizationOf <urn:Department0> .\n"
+                + "  <urn:Alice> lubm:headOf <urn:Department0> .\n"
+                + "  <urn:Bob> lubm:headOf <urn:ResearchGroup0> .\n"
+                + "  <urn:Carol> lubm:worksFor <urn:Department0> .\n"
+                + "}}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <" + lubm + "Chair> }}";
+        final String explicitQuery = "prefix lubm: <" + lubm + ">\n"
+                + "select distinct ?x { GRAPH <http://updated/test> {\n"
+                + "  { ?x a lubm:Chair }\n"
+                + "  UNION\n"
+                + "  { ?x lubm:headOf [ a lubm:Department ] }\n"
+                + "}}";
+        log.info("Running Explicit Query");
+        final CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+        log.info("Running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 0);
+        log.info("Adding owl:someValuesFrom Schema");
+        insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
+                + "PREFIX owl: <" + OWL.NAMESPACE + ">\n"
+                + "PREFIX lubm: <" + lubm + ">\n"
+                + "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  lubm:Chair owl:equivalentClass [ owl:onProperty lubm:headOf ; owl:someValuesFrom lubm:Department ] ."
+                + "}}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        log.info("Refreshing InferenceEngine");
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+        log.info("Re-running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+    }
+
     public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
     UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
         log.info("Adding Data");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java b/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
index dee5c8d..40a69b0 100644
--- a/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
+++ b/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
@@ -60,6 +60,7 @@ import org.apache.rya.rdftriplestore.inference.InverseOfVisitor;
 import org.apache.rya.rdftriplestore.inference.OneOfVisitor;
 import org.apache.rya.rdftriplestore.inference.PropertyChainVisitor;
 import org.apache.rya.rdftriplestore.inference.SameAsVisitor;
+import org.apache.rya.rdftriplestore.inference.SomeValuesFromVisitor;
 import org.apache.rya.rdftriplestore.inference.SubClassOfVisitor;
 import org.apache.rya.rdftriplestore.inference.SubPropertyOfVisitor;
 import org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
@@ -353,6 +354,7 @@ public class RdfCloudTripleStoreConnection extends SailConnectionBase {
                     ) {
                 try {
                     tupleExpr.visit(new DomainRangeVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new SomeValuesFromVisitor(queryConf, inferenceEngine));
                     tupleExpr.visit(new AllValuesFromVisitor(queryConf, inferenceEngine));
                     tupleExpr.visit(new HasValueVisitor(queryConf, inferenceEngine));
                     tupleExpr.visit(new IntersectionOfVisitor(queryConf, inferenceEngine));

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
index 936fd41..a0a5cfc 100644
--- a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
+++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
@@ -83,6 +83,7 @@ public class InferenceEngine {
     private Map<URI, Set<URI>> rangeByType;
     private Map<Resource, Map<URI, Value>> hasValueByType;
     private Map<URI, Map<Resource, Value>> hasValueByProperty;
+    private Map<Resource, Map<Resource, URI>> someValuesFromByRestrictionType;
     private Map<Resource, Map<Resource, URI>> allValuesFromByValueType;
     private final ConcurrentHashMap<Resource, List<Set<Resource>>> intersections = new ConcurrentHashMap<>();
     private final ConcurrentHashMap<Resource, Set<Resource>> enumerations = new ConcurrentHashMap<>();
@@ -540,7 +541,7 @@ public class InferenceEngine {
                 // p's subproperties. Would be redundant for properties discovered via this rule.
                 while (!domainViaInverseProperty.isEmpty()) {
                     final URI property = domainViaInverseProperty.pop();
-                    final Set<URI> subProperties = findParents(subPropertyOfGraph, property);
+                    final Set<URI> subProperties = getSubProperties(property);
                     subProperties.removeAll(propertiesWithDomain);
                     propertiesWithDomain.addAll(subProperties);
                     domainViaSuperProperty.addAll(subProperties);
@@ -549,7 +550,7 @@ public class InferenceEngine {
                 // p's subproperties. Would be redundant for properties discovered via this rule.
                 while (!rangeViaInverseProperty.isEmpty()) {
                     final URI property = rangeViaInverseProperty.pop();
-                    final Set<URI> subProperties = findParents(subPropertyOfGraph, property);
+                    final Set<URI> subProperties = getSubProperties(property);
                     subProperties.removeAll(propertiesWithRange);
                     propertiesWithRange.addAll(subProperties);
                     rangeViaSuperProperty.addAll(subProperties);
@@ -606,6 +607,7 @@ public class InferenceEngine {
         }
         // Query for specific types of restriction and add their details to the schema
         refreshHasValueRestrictions(restrictions);
+        refreshSomeValuesFromRestrictions(restrictions);
         refreshAllValuesFromRestrictions(restrictions);
     }
 
@@ -637,34 +639,56 @@ public class InferenceEngine {
         }
     }
 
+    private void refreshSomeValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
+        someValuesFromByRestrictionType = new ConcurrentHashMap<>();
+        ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new RDFHandlerBase() {
+            @Override
+            public void handleStatement(final Statement statement) throws RDFHandlerException {
+                final Resource restrictionClass = statement.getSubject();
+                if (restrictions.containsKey(restrictionClass) && statement.getObject() instanceof Resource) {
+                    final URI property = restrictions.get(restrictionClass);
+                    final Resource valueClass = (Resource) statement.getObject();
+                    // Should also be triggered by subclasses of the value class
+                    final Set<Resource> valueClasses = new HashSet<>();
+                    valueClasses.add(valueClass);
+                    if (valueClass instanceof URI) {
+                        valueClasses.addAll(getSubClasses((URI) valueClass));
+                    }
+                    for (final Resource valueSubClass : valueClasses) {
+                        if (!someValuesFromByRestrictionType.containsKey(restrictionClass)) {
+                            someValuesFromByRestrictionType.put(restrictionClass, new ConcurrentHashMap<>());
+                        }
+                        someValuesFromByRestrictionType.get(restrictionClass).put(valueSubClass, property);
+                    }
+                }
+            }
+        });
+    }
+
     private void refreshAllValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
-        allValuesFromByValueType = new HashMap<>();
-        final CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, OWL.ALLVALUESFROM, null, conf);
-        try {
-            while (iter.hasNext()) {
-                final Statement st = iter.next();
-                if (restrictions.containsKey(st.getSubject()) && st.getObject() instanceof URI) {
-                    final URI property = restrictions.get(st.getSubject());
-                    final URI valueClass = (URI) st.getObject();
+        allValuesFromByValueType = new ConcurrentHashMap<>();
+        ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new RDFHandlerBase() {
+            @Override
+            public void handleStatement(final Statement statement) throws RDFHandlerException {
+                final Resource directRestrictionClass = statement.getSubject();
+                if (restrictions.containsKey(directRestrictionClass) && statement.getObject() instanceof Resource) {
+                    final URI property = restrictions.get(directRestrictionClass);
+                    final Resource valueClass = (Resource) statement.getObject();
                     // Should also be triggered by subclasses of the property restriction
                     final Set<Resource> restrictionClasses = new HashSet<>();
-                    restrictionClasses.add(st.getSubject());
-                    if (st.getSubject() instanceof URI) {
-                        restrictionClasses.addAll(getSubClasses((URI) st.getSubject()));
+                    restrictionClasses.add(directRestrictionClass);
+                    if (directRestrictionClass instanceof URI) {
+                        restrictionClasses.addAll(getSubClasses((URI) directRestrictionClass));
                     }
                     for (final Resource restrictionClass : restrictionClasses) {
                         if (!allValuesFromByValueType.containsKey(valueClass)) {
-                            allValuesFromByValueType.put(valueClass, new HashMap<>());
+                            allValuesFromByValueType.put(valueClass, new ConcurrentHashMap<>());
                         }
                         allValuesFromByValueType.get(valueClass).put(restrictionClass, property);
                     }
                 }
             }
-        } finally {
-            if (iter != null) {
-                iter.close();
-            }
-        }
+        });
     }
 
     private void refreshIntersectionOf() throws QueryEvaluationException {
@@ -904,7 +928,8 @@ public class InferenceEngine {
      * internal subclass graph.
      * @param type the type {@link URI} to find super classes for.
      * @return the {@link Set} of {@link URI} types that are super classes types
-     * of the specified {@code type}. Returns an empty set if nothing was found.
+     * of the specified {@code type}. Returns an empty set if nothing was found,
+     * or if either type or the subclass graph is {@code null}.
      */
     public Set<URI> getSuperClasses(final URI type) {
         return findChildren(subClassOfGraph, type);
@@ -915,31 +940,102 @@ public class InferenceEngine {
      * internal subclass graph.
      * @param type the type {@link URI} to find sub classes for.
      * @return the {@link Set} of {@link URI} types that are sub classes types
-     * of the specified {@code type}. Returns an empty set if nothing was found.
+     * of the specified {@code type}. Returns an empty set if nothing was found,
+     * or if either type or the subclass graph is {@code null}.
      */
     public Set<URI> getSubClasses(final URI type) {
         return findParents(subClassOfGraph, type);
     }
 
+    /**
+     * Returns all superproperties of the specified property based on the
+     * internal subproperty graph.
+     * @param property the property {@link URI} to find superproperties for.
+     * @return the {@link Set} of {@link URI} properties that are superproperties
+     * of the specified {@code property}. Returns an empty set if nothing was found,
+     * or if either property or the subproperty graph is {@code null}.
+     */
+    public Set<URI> getSuperProperties(final URI property) {
+        return findChildren(subPropertyOfGraph, property);
+    }
+
+    /**
+     * Returns all subproperties of the specified property based on the
+     * internal subproperty graph.
+     * @param property the property {@link URI} to find subproperties for.
+     * @return the {@link Set} of {@link URI} properties that are subproperties
+     * of the specified {@code property}. Returns an empty set if nothing was found,
+     * or if either property or the subproperty graph is {@code null}.
+     */
+    public Set<URI> getSubProperties(final URI property) {
+        return findParents(subPropertyOfGraph, property);
+    }
+
+    /**
+     * Given a graph and a node, recursively traverse the graph from that node
+     * to find all predecessors.
+     * @param graph A {@link Graph}
+     * @param vertexId The starting node
+     * @return The set of predecessors, or an empty set if none are found or if
+     *      either argument is {@code null}
+     */
     public static Set<URI> findParents(final Graph graph, final URI vertexId) {
         return findParents(graph, vertexId, true);
     }
 
+    /**
+     * Given a graph and a node, find all immediate parents and optionally
+     * traverse the graph recursively to find all indirect predecessors.
+     * @param graph A {@link Graph}
+     * @param vertexId The starting node
+     * @param isRecursive If true, traverse the graph recursively
+     * @return The set of predecessors, or an empty set if none are found or if
+     *      either argument is {@code null}
+     */
     public static Set<URI> findParents(final Graph graph, final URI vertexId, final boolean isRecursive) {
         return findConnected(graph, vertexId, Direction.IN, isRecursive);
     }
 
+    /**
+     * Given a graph and a node, recursively traverse the graph from that node
+     * to find all successors.
+     * @param graph A {@link Graph}
+     * @param vertexId The starting node
+     * @return The set of successors, or an empty set if none are found or if
+     *      either argument is {@code null}
+     */
     public static Set<URI> findChildren(final Graph graph, final URI vertexId) {
         return findChildren(graph, vertexId, true);
     }
 
+    /**
+     * Given a graph and a node, find all immediate children and optionally
+     * traverse the graph recursively to find all indirect successors.
+     * @param graph A {@link Graph}
+     * @param vertexId The starting node
+     * @param isRecursive If true, traverse the graph recursively
+     * @return The set of successors, or an empty set if none are found or if
+     *      either argument is {@code null}
+     */
     public static Set<URI> findChildren(final Graph graph, final URI vertexId, final boolean isRecursive) {
         return findConnected(graph, vertexId, Direction.OUT, isRecursive);
     }
 
+    /**
+     * Given a graph, a starting node, and a direction, find immediate neighbors
+     * of the start node in that direction, and optionally traverse the graph
+     * recursively in that same direction to find indirect connections.
+     * @param graph A {@link Graph}
+     * @param vertexId The starting node
+     * @param traversal Look for connected nodes in this direction
+     * @param isRecursive If true, recursively follow the connected nodes' own
+     *      edges (in the same direction)
+     * @return The set of connected nodes, or an empty set if none are found, or
+     *      if either the graph or the starting vertex is {@code null}.
+     */
     private static Set<URI> findConnected(final Graph graph, final URI vertexId, final Direction traversal, final boolean isRecursive) {
         final Set<URI> connected = new HashSet<>();
-        if (graph == null) {
+        if (graph == null || vertexId == null) {
             return connected;
         }
         final Vertex v = getVertex(graph, vertexId);
@@ -1263,6 +1359,75 @@ public class InferenceEngine {
     }
 
     /**
+     * Given some schema mapping types to (type, property) pairs that somehow imply the key type,
+     * and given a particular type being queried for, expand the combinations of types and
+     * properties that can imply the query type by including any pairs that could imply subtypes of
+     * the query type (using the subclass graph), and by expanding each property into a set of all
+     * subproperties that imply it (using the subproperty graph). Does not consider subtypes of
+     * potential triggering types.
+     * @param queryType The type whose possible derivations are needed
+     * @param schemaMap Map of schema information such that each key represents a type that can
+     *      somehow be derived from (other type x property) combinations, and the value provides
+     *      those combinations that can be used for the implication.
+     * @return Combinations of types and properties that can directly or indirectly imply the query
+     *      type according to the schema provided and the subclass/superproperty graphs. Any
+     *      individual type/property combination is sufficient. Returns an empty map if either
+     *      parameter is {@code null}.
+     */
+    private Map<Resource, Set<URI>> getTypePropertyImplyingType(final Resource queryType, final Map<Resource, Map<Resource, URI>> schemaMap) {
+        final Map<Resource, Set<URI>> implications = new HashMap<>();
+        if (schemaMap != null && queryType != null) {
+            // Check for any subtypes which would in turn imply the type being queried for
+            final HashSet<Resource> queryTypes = new HashSet<>();
+            queryTypes.add(queryType);
+            if (queryType instanceof URI) {
+                queryTypes.addAll(getSubClasses((URI) queryType));
+            }
+            for (final Resource querySubType : queryTypes) {
+                if (schemaMap.containsKey(querySubType)) {
+                    final Map<Resource, URI> otherTypeToProperty = schemaMap.get(querySubType);
+                    for (final Resource otherType : otherTypeToProperty.keySet()) {
+                        if (!implications.containsKey(otherType)) {
+                            implications.put(otherType, new HashSet<>());
+                        }
+                        final URI property = otherTypeToProperty.get(otherType);
+                        if (property != null) {
+                            implications.get(otherType).add(property);
+                            // Also add subproperties that would in turn imply the property
+                            implications.get(otherType).addAll(getSubProperties(property));
+                        }
+                    }
+                }
+            }
+        }
+        return implications;
+    }
+
+    /**
+     * For a given type, return information about any owl:someValuesFrom restriction that could
+     * imply an individual's membership in that type: When a property restriction R applies to
+     * property p and states "R owl:someValuesFrom T", then whenever the object of a triple belongs
+     * to T, and the predicate is p, then the subject of the triple is implied to have the type R
+     * (it belongs to the class defined by the restriction).
+     * @param restrictionType The type to be inferred, which is the type of the subject of the
+     *      triple, or the type for which all members are stated to have some value of the
+     *      appropriate type. Takes class hierarchy into account, so possible inferences include
+     *      any ways of inferring subtypes of the restriction type, and object types that trigger
+     *      inference include any subtypes of relevant value types. Also considers property
+     *      hierarchy, so properties that trigger inference will include subproperties of those
+     *      referenced by relevant restrictions.
+     * @return A map from object type (the object of the someValuesFrom condition or the subtype of
+     *      such a type) to the set of properties (including any property referenced by such a
+     *      restriction and all of its subproperties) such that for any individual which belongs to
+     *      the object type, any subject which has some value of that type for that property belongs
+     *      to the restriction type. Empty map if the parameter is {@code null} or if the
+     *      someValuesFrom schema has not been populated.
+     */
+    public Map<Resource, Set<URI>> getSomeValuesFromByRestrictionType(Resource restrictionType) {
+        return getTypePropertyImplyingType(restrictionType, someValuesFromByRestrictionType);
+    }
+
+    /**
      * For a given type, return information about any owl:allValuesFrom restriction that could imply
      * an individual's membership in that type: If the subject of a triple belongs to the type
      * associated with the restriction itself, and the predicate is the one referenced by the
@@ -1276,33 +1441,11 @@ public class InferenceEngine {
      * @return A map from subject type (a property restriction type or a subtype of one) to the set
      *      of properties (including any property referenced by such a restriction and all of its
      *      subproperties) such that for any individual which belongs to the subject type, all
-     *      values it has for any of those properties belong to the value type.
+     *      values it has for any of those properties belong to the value type. Empty map if the
+     *      parameter is {@code null} or if the allValuesFrom schema has not been populated.
      */
     public Map<Resource, Set<URI>> getAllValuesFromByValueType(final Resource valueType) {
-        final Map<Resource, Set<URI>> implications = new HashMap<>();
-        if (allValuesFromByValueType != null) {
-            // Check for any subtypes which would in turn imply the value type
-            final HashSet<Resource> valueTypes = new HashSet<>();
-            valueTypes.add(valueType);
-            if (valueType instanceof URI) {
-                valueTypes.addAll(getSubClasses((URI) valueType));
-            }
-            for (final Resource valueSubType : valueTypes) {
-                if (allValuesFromByValueType.containsKey(valueSubType)) {
-                    final Map<Resource, URI> restrictionToProperty = allValuesFromByValueType.get(valueSubType);
-                    for (final Resource restrictionType : restrictionToProperty.keySet()) {
-                        if (!implications.containsKey(restrictionType)) {
-                            implications.put(restrictionType, new HashSet<>());
-                        }
-                        final URI property = restrictionToProperty.get(restrictionType);
-                        implications.get(restrictionType).add(property);
-                        // Also add subproperties that would in turn imply the property
-                        implications.get(restrictionType).addAll(findParents(subPropertyOfGraph, property));
-                    }
-                }
-            }
-        }
-        return implications;
+        return getTypePropertyImplyingType(valueType, allValuesFromByValueType);
     }
 
     /**

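Both restriction lookups now go through the shared getTypePropertyImplyingType helper. A minimal sketch of what a caller sees (assuming an InferenceEngine named inferenceEngine that has been refreshed against the LUBM-style schema from the example above, and the openrdf ValueFactoryImpl):

    final ValueFactory vf = new ValueFactoryImpl();
    final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
    final Map<Resource, Set<URI>> derivations =
            inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI(lubm + "Chair"));
    // For that schema the expected shape is { lubm:Department -> { lubm:headOf } }, expanded
    // with any known subclasses of lubm:Department and subproperties of lubm:headOf; an empty
    // map comes back if no matching restriction exists or refreshGraph() has not been called.
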
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java
new file mode 100644
index 0000000..16a315e
--- /dev/null
+++ b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitor.java
@@ -0,0 +1,110 @@
+package org.apache.rya.rdftriplestore.inference;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.utils.NullableStatementImpl;
+import org.apache.rya.rdftriplestore.utils.FixedStatementPattern;
+import org.openrdf.model.Resource;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Expands the query tree to account for any existential class expressions (property restrictions
+ * using owl:someValuesFrom) in the ontology known to the {@link InferenceEngine}.
+ *
+ * Operates on {@link StatementPattern} nodes whose predicate is rdf:type and whose object is a
+ * defined type (not a variable) which corresponds to a someValuesFrom expression in the ontology.
+ * When applicable, replaces the node with a union of itself and a subtree that matches any instance
+ * that can be inferred to have the type in question via the semantics of owl:someValuesFrom.
+ *
+ * An existential class expression references a predicate and a value class, and represents the set
+ * of individuals with at least one value of that class for that predicate. Therefore, membership
+ * in the class expression should be inferred for any individual which is the subject of a triple
+ * with that predicate and with an object belonging to the value type. This implication is similar
+ * to rdfs:domain except that it only applies when the object of the triple belongs to a specific
+ * type.
+ *
+ * (Note: The inference in the other direction would be that, if an individual is declared to belong
+ * to the class expression, then there exists some other individual which satisfies the requirement
+ * that there is at least one value of the appropriate type. However, this other individual may be
+ * any arbitrary resource, explicitly represented in the data or otherwise, so this implication is
+ * not used.)
+ */
+public class SomeValuesFromVisitor extends AbstractInferVisitor {
+    /**
+     * Creates a new {@link SomeValuesFromVisitor}.
+     * @param conf The {@link RdfCloudTripleStoreConfiguration}.
+     * @param inferenceEngine The InferenceEngine containing the relevant ontology.
+     */
+    public SomeValuesFromVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferSomeValuesFrom();
+    }
+
+    /**
+     * Checks whether the StatementPattern is a type query whose solutions could be inferred by
+     * someValuesFrom inference, and if so, replaces the node with a union of itself and any
+     * possible inference.
+     */
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        final Var subjVar = node.getSubjectVar();
+        final Var predVar = node.getPredicateVar();
+        final Var objVar = node.getObjectVar();
+        // Only applies to type queries where the type is defined
+        if (predVar != null && RDF.TYPE.equals(predVar.getValue()) && objVar != null && objVar.getValue() instanceof Resource) {
+            final Resource typeToInfer = (Resource) objVar.getValue();
+            Map<Resource, Set<URI>> relevantSvfRestrictions = inferenceEngine.getSomeValuesFromByRestrictionType(typeToInfer);
+            if (!relevantSvfRestrictions.isEmpty()) {
+                // We can infer the queried type if it corresponds to a someValuesFrom restriction (or a
+                // supertype of one), and the node in question (subjVar) is the subject of a triple
+                // whose predicate is the restriction's property and whose object is an arbitrary
+                // node of the restriction's value type.
+                final Var valueTypeVar = new Var("t-" + UUID.randomUUID());
+                final Var svfPredVar = new Var("p-" + UUID.randomUUID());
+                final Var neighborVar = new Var("n-" + UUID.randomUUID());
+                neighborVar.setAnonymous(true);
+                final StatementPattern membershipPattern = new DoNotExpandSP(neighborVar,
+                        new Var(RDF.TYPE.stringValue(), RDF.TYPE), valueTypeVar);
+                final StatementPattern valuePattern = new StatementPattern(subjVar, svfPredVar, neighborVar);
+                final InferJoin svfPattern = new InferJoin(membershipPattern, valuePattern);
+                // Use a FixedStatementPattern to contain the appropriate (predicate, value type)
+                // pairs, and check each one against the general pattern.
+                final FixedStatementPattern svfPropertyTypes = new FixedStatementPattern(svfPredVar,
+                        new Var(OWL.SOMEVALUESFROM.stringValue(), OWL.SOMEVALUESFROM), valueTypeVar);
+                for (Resource svfValueType : relevantSvfRestrictions.keySet()) {
+                    for (URI svfProperty : relevantSvfRestrictions.get(svfValueType)) {
+                        svfPropertyTypes.statements.add(new NullableStatementImpl(svfProperty,
+                                OWL.SOMEVALUESFROM, svfValueType));
+                    }
+                }
+                final InferJoin svfInferenceQuery = new InferJoin(svfPropertyTypes, svfPattern);
+                node.replaceWith(new InferUnion(node.clone(), svfInferenceQuery));
+            }
+        }
+    }
+}

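A minimal sketch of the rewrite in isolation, assuming conf is an RdfCloudTripleStoreConfiguration with someValuesFrom inference enabled, inferenceEngine has been refreshed against an ontology where lubm:Chair is an owl:someValuesFrom restriction on lubm:headOf with value class lubm:Department, and the query is parsed with the openrdf SPARQLParser:

    final SPARQLParser parser = new SPARQLParser();
    final TupleExpr tupleExpr = parser.parseQuery(
            "SELECT ?x WHERE { ?x a <http://swat.cse.lehigh.edu/onto/univ-bench.owl#Chair> }",
            null).getTupleExpr();
    tupleExpr.visit(new SomeValuesFromVisitor(conf, inferenceEngine));
    // The rdf:type pattern is now a union of the original pattern and a join matching any ?x
    // that has a lubm:headOf (or subproperty) edge to a node typed lubm:Department (or a
    // subclass), the same structure asserted in SomeValuesFromVisitorTest below.
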
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/test/java/org/apache/rya/rdftriplestore/inference/AllValuesFromVisitorTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/AllValuesFromVisitorTest.java b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/AllValuesFromVisitorTest.java
index d284d57..d239577 100644
--- a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/AllValuesFromVisitorTest.java
+++ b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/AllValuesFromVisitorTest.java
@@ -108,11 +108,11 @@ public class AllValuesFromVisitorTest {
         Assert.assertTrue(left instanceof StatementPattern);
         Assert.assertTrue(right instanceof StatementPattern);
         // Verify expected predicate/restriction pairs
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(parentsArePeople, OWL.ONPROPERTY, parent)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(relativesArePeople, OWL.ONPROPERTY, relative)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(relativesArePeople, OWL.ONPROPERTY, parent)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(parentsAreTallPeople, OWL.ONPROPERTY, parent)));
         Assert.assertEquals(4, fsp.statements.size());
-        fsp.statements.contains(new NullableStatementImpl(parentsArePeople, OWL.ONPROPERTY, parent));
-        fsp.statements.contains(new NullableStatementImpl(relativesArePeople, OWL.ONPROPERTY, relative));
-        fsp.statements.contains(new NullableStatementImpl(relativesArePeople, OWL.ONPROPERTY, parent));
-        fsp.statements.contains(new NullableStatementImpl(parentsAreTallPeople, OWL.ONPROPERTY, parent));
         // Verify general pattern for matching instances of each pair: Join on unknown subject; left
         // triple states it belongs to the restriction while right triple relates it to the original
         // subject variable by the relevant property. Restriction and property variables are given

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java
index f290324..7ef56c5 100644
--- a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java
+++ b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java
@@ -227,6 +227,48 @@ public class InferenceEngineTest extends TestCase {
     }
 
     @Test
+    public void testSomeValuesFrom() throws Exception {
+        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
+                // base restrictions
+                + "  <urn:Chair> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:Department> .\n"
+                + "  <urn:Dean> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:College> .\n"
+                // classes related to the restriction type
+                + "  <urn:ScienceDepartmentChair> rdfs:subClassOf <urn:Chair> .\n"
+                + "  <urn:Chair> rdfs:subClassOf <urn:Person> .\n"
+                + "  <urn:Dean> rdfs:subClassOf <urn:Person> .\n"
+                + "  <urn:Student> rdfs:subClassOf <urn:Person> .\n"
+                // classes related to the value type
+                + "  <urn:ScienceDepartment> rdfs:subClassOf <urn:Department> .\n"
+                + "  <urn:HumanitiesDepartment> rdfs:subClassOf <urn:Department> .\n"
+                + "  <urn:Department> rdfs:subClassOf <urn:Organization> .\n"
+                + "  <urn:College> rdfs:subClassOf <urn:Organization> .\n"
+                // properties related to the restriction property
+                + "  <urn:temporaryHeadOf> rdfs:subPropertyOf <urn:headOf> .\n"
+                + "  <urn:headOf> rdfs:subPropertyOf <urn:worksFor> .\n"
+                + "}}";
+        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
+        inferenceEngine.refreshGraph();
+        final Set<URI> properties = new HashSet<>();
+        properties.add(vf.createURI("urn:headOf"));
+        properties.add(vf.createURI("urn:temporaryHeadOf"));
+        final Map<Resource, Set<URI>> chairDerivations = new HashMap<>();
+        chairDerivations.put(vf.createURI("urn:Department"), properties);
+        chairDerivations.put(vf.createURI("urn:ScienceDepartment"), properties);
+        chairDerivations.put(vf.createURI("urn:HumanitiesDepartment"), properties);
+        final Map<Resource, Set<URI>> deanDerivations = new HashMap<>();
+        deanDerivations.put(vf.createURI("urn:College"), properties);
+        final Map<Resource, Set<URI>> combinedDerivations = new HashMap<>(chairDerivations);
+        combinedDerivations.put(vf.createURI("urn:College"), properties);
+        // Get someValuesFrom restrictions given the direct types
+        Assert.assertEquals(deanDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Dean")));
+        Assert.assertEquals(chairDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Chair")));
+        // Finds the subtype's restrictions given the supertype
+        Assert.assertEquals(combinedDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Person")));
+        // Finds nothing if given a subtype which is not a restriction
+        Assert.assertEquals(new HashMap<>(), inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:ScienceDepartmentChair")));
+    }
+
+    @Test
     public void testAllValuesFrom() throws Exception {
         final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                 + "  <urn:Dog> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Dog> .\n"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceIT.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceIT.java b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceIT.java
index 1fcfa2c..11dfeb0 100644
--- a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceIT.java
+++ b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceIT.java
@@ -18,9 +18,11 @@ package org.apache.rya.rdftriplestore.inference;
  * under the License.
  */
 
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.Connector;
@@ -48,6 +50,8 @@ import org.openrdf.repository.sail.SailRepositoryConnection;
 import junit.framework.TestCase;
 
 public class InferenceIT extends TestCase {
+    private final static String LUBM = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
+
     private Connector connector;
     private AccumuloRyaDAO dao;
     private final ValueFactory vf = new ValueFactoryImpl();
@@ -148,7 +152,7 @@ public class InferenceIT extends TestCase {
 
     @Test
     public void testDomainRangeQuery() throws Exception {
-        final String ontology = "PREFIX lubm: <http://swat.lehigh.edu/onto/univ-bench.owl#>\n"
+        final String ontology = "PREFIX lubm: <" + LUBM + ">\n"
                 + "INSERT DATA {\n"
                 + "  lubm:advisor rdfs:domain lubm:Person ;\n"
                 + "               rdfs:range lubm:Professor ;"
@@ -162,7 +166,7 @@ public class InferenceIT extends TestCase {
                 + "  lubm:Faculty rdfs:subClassOf lubm:Person .\n"
                 + "  lubm:Student rdfs:subClassOf lubm:Person .\n"
                 + "}";
-        final String instances = "PREFIX lubm: <http://swat.lehigh.edu/onto/univ-bench.owl#>\n"
+        final String instances = "PREFIX lubm: <" + LUBM + ">\n"
                 + "INSERT DATA {\n"
                 + "  <urn:Professor1> a lubm:Professor .\n"
                 + "  <urn:Student1> a lubm:Student .\n"
@@ -172,7 +176,7 @@ public class InferenceIT extends TestCase {
                 + "  <urn:Professor4> lubm:teachesCourse <urn:CS100> .\n"
                 + "  <urn:Student1> lubm:takesCourse <urn:CS100> .\n"
                 + "}";
-        final String query = "SELECT ?x { ?x a <http://swat.lehigh.edu/onto/univ-bench.owl#Faculty> }";
+        final String query = "SELECT ?x { ?x a <" + LUBM + "Faculty> }";
         conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
         inferenceEngine.refreshGraph();
         conn.prepareUpdate(QueryLanguage.SPARQL, instances).execute();
@@ -191,6 +195,64 @@ public class InferenceIT extends TestCase {
     }
 
     @Test
+    public void testSomeValuesFromQuery() throws Exception {
+        final String ontology = "PREFIX lubm: <" + LUBM + ">\n"
+                + "INSERT DATA { GRAPH <http://updated/test> {\n"
+                + "  lubm:Chair rdfs:subClassOf lubm:Professor; \n"
+                + "    a owl:Restriction ;\n"
+                + "    owl:onProperty lubm:headOf ;\n"
+                + "    owl:someValuesFrom lubm:Department .\n"
+                + "  lubm:Dean rdfs:subClassOf lubm:Professor; \n"
+                + "    a owl:Restriction ;\n"
+                + "    owl:onProperty lubm:headOf ;\n"
+                + "    owl:someValuesFrom lubm:College .\n"
+                + "  lubm:Student rdfs:subClassOf lubm:Person ;\n"
+                + "    a owl:Restriction ;\n"
+                + "    owl:onProperty lubm:takesCourse ;\n"
+                + "    owl:someValuesFrom lubm:Course .\n"
+                + "  lubm:GraduateStudent rdfs:subClassOf lubm:Student; \n"
+                + "    a owl:Restriction ;\n"
+                + "    owl:onProperty lubm:takesCourse ;\n"
+                + "    owl:someValuesFrom lubm:GraduateCourse .\n"
+                + "  lubm:Professor rdfs:subClassOf lubm:Person .\n"
+                + "  lubm:headOf rdfs:subPropertyOf lubm:worksFor .\n"
+                + "  <urn:passesCourse> rdfs:subPropertyOf lubm:takesCourse ."
+                + "}}";
+        final String instances = "PREFIX lubm: <" + LUBM + ">\n"
+                + "INSERT DATA { GRAPH <http://updated/test> {\n"
+                + "  <urn:CS101> a <urn:UndergraduateCourse> .\n"
+                + "  <urn:CS301> a lubm:Course .\n"
+                + "  <urn:CS501> a lubm:GraduateCourse .\n"
+                // valid ways of inferring Student (including via GraduateStudent):
+                + "  <urn:Alice> lubm:takesCourse <urn:CS301>, <urn:CS501> .\n"
+                + "  <urn:Bob> <urn:passesCourse> [ a lubm:GraduateCourse ] .\n"
+                + "  <urn:Carol> a lubm:GraduateStudent; lubm:takesCourse <urn:CS301> .\n"
+                // similar patterns that don't match the appropriate restrictions:
+                + "  <urn:Dan> lubm:takesCourse <urn:CS101> .\n"
+                + "  <urn:Eve> lubm:headOf [ a lubm:Department ] .\n"
+                + "  <urn:Frank> lubm:headOf [ a lubm:College ] .\n"
+                + "}}";
+        final String query = "SELECT ?individual { GRAPH <http://updated/test> {\n"
+                + "  ?individual a <" + LUBM + "Student>\n"
+                + "}} \n";
+        // Query should match student and graduate student restrictions, but not the others
+        conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
+        inferenceEngine.refreshGraph();
+        conn.prepareUpdate(QueryLanguage.SPARQL, instances).execute();
+        conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(resultHandler);
+        Map<Value, Integer> expected = new HashMap<>();
+        expected.put(vf.createURI("urn:Alice"), 2); // from both courses
+        expected.put(vf.createURI("urn:Bob"), 1); // from course
+        expected.put(vf.createURI("urn:Carol"), 2); // from course and explicit type
+        Map<Value, Integer> returned = new HashMap<>();
+        for (BindingSet bs : solutions) {
+            Value v = bs.getBinding("individual").getValue();
+            returned.put(v, returned.getOrDefault(v, 0) + 1);
+        }
+        Assert.assertEquals(expected, returned);
+    }
+
+    @Test
     public void testAllValuesFromQuery() throws Exception {
         final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
                 + "  <urn:Cairn_Terrier> rdfs:subClassOf <urn:Terrier> .\n"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/fc8d30ac/sail/src/test/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitorTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitorTest.java b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitorTest.java
new file mode 100644
index 0000000..013e535
--- /dev/null
+++ b/sail/src/test/java/org/apache/rya/rdftriplestore/inference/SomeValuesFromVisitorTest.java
@@ -0,0 +1,152 @@
+package org.apache.rya.rdftriplestore.inference;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.api.utils.NullableStatementImpl;
+import org.apache.rya.rdftriplestore.utils.FixedStatementPattern;
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.model.Resource;
+import org.openrdf.model.URI;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.ProjectionElem;
+import org.openrdf.query.algebra.ProjectionElemList;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.Union;
+import org.openrdf.query.algebra.Var;
+
+import com.google.common.collect.Sets;
+
+public class SomeValuesFromVisitorTest {
+    private static final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+    private static final ValueFactory vf = new ValueFactoryImpl();
+
+    // Value types
+    private final URI course = vf.createURI("lubm:Course");
+    private final URI gradCourse = vf.createURI("lubm:GraduateCourse");
+    private final URI department = vf.createURI("lubm:Department");
+    private final URI organization = vf.createURI("lubm:Organization");
+    // Predicates
+    private final URI takesCourse = vf.createURI("lubm:takesCourse");
+    private final URI headOf = vf.createURI("lubm:headOf");
+    private final URI worksFor = vf.createURI("lubm:worksFor");
+    // Supertype of restriction types
+    private final URI person = vf.createURI("lubm:Person");
+
+    @Test
+    public void testSomeValuesFrom() throws Exception {
+        // Configure a mock inference engine with an ontology:
+        final InferenceEngine inferenceEngine = mock(InferenceEngine.class);
+        Map<Resource, Set<URI>> personSVF = new HashMap<>();
+        personSVF.put(gradCourse, Sets.newHashSet(takesCourse));
+        personSVF.put(course, Sets.newHashSet(takesCourse));
+        personSVF.put(department, Sets.newHashSet(headOf));
+        personSVF.put(organization, Sets.newHashSet(worksFor, headOf));
+        when(inferenceEngine.getSomeValuesFromByRestrictionType(person)).thenReturn(personSVF);
+        // Query for a specific type and rewrite using the visitor:
+        StatementPattern originalSP = new StatementPattern(new Var("s"), new Var("p", RDF.TYPE), new Var("o", person));
+        final Projection query = new Projection(originalSP, new ProjectionElemList(new ProjectionElem("s", "subject")));
+        query.visit(new SomeValuesFromVisitor(conf, inferenceEngine));
+        // Expected structure: a union of two elements: one is equal to the original statement
+        // pattern, and the other one joins a list of predicate/value type combinations
+        // with another join querying for any nodes who are the subject of a triple with that
+        // predicate and with an object of that type.
+        //
+        // Union(
+        //     SP(?node a :impliedType),
+        //     Join(
+        //         FSP(<?property someValuesFrom ?valueType> {
+        //             takesCourse/Course;
+        //             takesCourse/GraduateCourse;
+        //             headOf/Department;
+        //             headOf/Organization;
+        //             worksFor/Organization;
+        //         }),
+        //         Join(
+        //             SP(_:object a ?valueType),
+        //             SP(?node ?property _:object)
+        //         )
+        //     )
+        Assert.assertTrue(query.getArg() instanceof Union);
+        TupleExpr left = ((Union) query.getArg()).getLeftArg();
+        TupleExpr right = ((Union) query.getArg()).getRightArg();
+        Assert.assertEquals(originalSP, left);
+        Assert.assertTrue(right instanceof Join);
+        final Join join = (Join) right;
+        Assert.assertTrue(join.getLeftArg() instanceof FixedStatementPattern);
+        Assert.assertTrue(join.getRightArg() instanceof Join);
+        FixedStatementPattern fsp = (FixedStatementPattern) join.getLeftArg();
+        left = ((Join) join.getRightArg()).getLeftArg();
+        right = ((Join) join.getRightArg()).getRightArg();
+        Assert.assertTrue(left instanceof StatementPattern);
+        Assert.assertTrue(right instanceof StatementPattern);
+        // Verify expected predicate/type pairs
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(takesCourse, OWL.SOMEVALUESFROM, course)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(takesCourse, OWL.SOMEVALUESFROM, gradCourse)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(headOf, OWL.SOMEVALUESFROM, department)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(headOf, OWL.SOMEVALUESFROM, organization)));
+        Assert.assertTrue(fsp.statements.contains(new NullableStatementImpl(worksFor, OWL.SOMEVALUESFROM, organization)));
+        Assert.assertEquals(5, fsp.statements.size());
+        // Verify pattern for matching instances of each pair: a Join of <_:x rdf:type ?t> and
+        // <?s ?p _:x> where p and t are the predicate/type pair and s is the original subject
+        // variable.
+        StatementPattern leftSP = (StatementPattern) left;
+        StatementPattern rightSP = (StatementPattern) right;
+        Assert.assertEquals(rightSP.getObjectVar(), leftSP.getSubjectVar());
+        Assert.assertEquals(RDF.TYPE, leftSP.getPredicateVar().getValue());
+        Assert.assertEquals(fsp.getObjectVar(), leftSP.getObjectVar());
+        Assert.assertEquals(originalSP.getSubjectVar(), rightSP.getSubjectVar());
+        Assert.assertEquals(fsp.getSubjectVar(), rightSP.getPredicateVar());
+    }
+
+    @Test
+    public void testSomeValuesFromDisabled() throws Exception {
+        // Disable someValuesFrom inference
+        final AccumuloRdfConfiguration disabledConf = conf.clone();
+        disabledConf.setInferSomeValuesFrom(false);
+        // Configure a mock inference engine with an ontology:
+        final InferenceEngine inferenceEngine = mock(InferenceEngine.class);
+        Map<Resource, Set<URI>> personSVF = new HashMap<>();
+        personSVF.put(gradCourse, Sets.newHashSet(takesCourse));
+        personSVF.put(course, Sets.newHashSet(takesCourse));
+        personSVF.put(department, Sets.newHashSet(headOf));
+        personSVF.put(organization, Sets.newHashSet(worksFor, headOf));
+        when(inferenceEngine.getSomeValuesFromByRestrictionType(person)).thenReturn(personSVF);
+        // Query for a specific type and visit -- the query should not change
+        StatementPattern originalSP = new StatementPattern(new Var("s"), new Var("p", RDF.TYPE), new Var("o", person));
+        final Projection originalQuery = new Projection(originalSP, new ProjectionElemList(new ProjectionElem("s", "subject")));
+        final Projection modifiedQuery = originalQuery.clone();
+        modifiedQuery.visit(new SomeValuesFromVisitor(disabledConf, inferenceEngine));
+        Assert.assertEquals(originalQuery, modifiedQuery);
+    }
+}