You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@stanbol.apache.org by al...@apache.org on 2012/03/16 20:35:07 UTC

svn commit: r1301713 - /incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java

Author: alexdma
Date: Fri Mar 16 19:35:07 2012
New Revision: 1301713

URL: http://svn.apache.org/viewvc?rev=1301713&view=rev
Log:
Another improvement for STANBOL-468 (saves another 30 lines of code) : now the enhancement graph is also added to the session, which is then exported for refactoring with the merge flag set. Earlier, all session contents were manually merged with each other and the enhancement graph. (TODO merge entity signatures *before* adding them to the session: this will be the major disk space saver)

Modified:
    incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java

Modified: incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
URL: http://svn.apache.org/viewvc/incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java?rev=1301713&r1=1301712&r2=1301713&view=diff
==============================================================================
--- incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java (original)
+++ incubator/stanbol/trunk/enhancer/engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java Fri Mar 16 19:35:07 2012
@@ -26,11 +26,9 @@ import java.util.Collections;
 import java.util.Dictionary;
 import java.util.Enumeration;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.clerezza.rdf.core.MGraph;
 import org.apache.clerezza.rdf.core.Resource;
@@ -91,8 +89,6 @@ import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyCreationException;
 import org.semanticweb.owlapi.model.OWLOntologyManager;
-import org.semanticweb.owlapi.model.OWLOntologySetProvider;
-import org.semanticweb.owlapi.util.OWLOntologyMerger;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -279,8 +275,8 @@ public class RefactorEnhancementEngine e
                           + engineConfiguration.getScope(), this);
 
             } catch (UnmodifiableOntologyCollectorException e) {
-                log.error("Cannot populate locked session '{}'. Aborting.", session.getID());
-                break;
+                throw new EngineException("Cannot populate locked session '" + session.getID()
+                                          + "'. Aborting.", e);
             } catch (OWLOntologyCreationException e) {
                 log.error("Failed to obtain ontology for entity " + entityReference + ". Skipping.", e);
                 continue;
@@ -291,101 +287,80 @@ public class RefactorEnhancementEngine e
 
         }
 
-        // Now merge the RDF from the TBox - the ontologies - and the ABox - the RDF data fetched
-        final OWLOntologyManager omgr = OWLManager.createOWLOntologyManager();
-
-        OWLOntologySetProvider provider = new OWLOntologySetProvider() {
-
-            @Override
-            public Set<OWLOntology> getOntologies() {
-                Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
-                ontologies.addAll(session.getManagedOntologies(OWLOntology.class, true));
-                /*
-                 * We add to the set the graph containing the metadata generated by previous enhancement
-                 * engines. It is important becaus we want to menage during the refactoring also some
-                 * information fron that graph. As the graph is provided as a Clerezza MGraph, we first need
-                 * to convert it to an OWLAPI OWLOntology. There is no chance that the mGraph could be null as
-                 * it was previously controlled by the JobManager through the canEnhance method and the
-                 * computeEnhancement is always called iff the former returns true.
-                 */
-                OWLOntology fiseMetadataOntology = OWLAPIToClerezzaConverter
-                        .clerezzaGraphToOWLOntology(mGraph);
-                ontologies.add(fiseMetadataOntology);
-                return ontologies;
-            }
-        };
+        try {
+            /*
+             * We add to the set the graph containing the metadata generated by previous enhancement engines.
+             * It is important because we want to manage during the refactoring also some information from that
+             * graph. As the graph is provided as a Clerezza MGraph, we first need to convert it to an OWLAPI
+             * OWLOntology.
+             * 
+             * There is no chance that the mGraph could be null as it was previously controlled by the
+             * JobManager through the canEnhance method and the computeEnhancement is always called iff the
+             * former returns true.
+             */
+            session.addOntology(new RootOntologySource(OWLAPIToClerezzaConverter
+                    .clerezzaGraphToOWLOntology(mGraph)));
+        } catch (UnmodifiableOntologyCollectorException e1) {
+            throw new EngineException("Cannot add enhancement graph to OntoNet session for refactoring", e1);
+        }
 
         /*
-         * We merge all the ontologies from the session into a single ontology that will be used for the
-         * refactoring.
+         * Export the entire session (incl. entities and enhancement graph) as a single merged ontology.
          * 
-         * TODO the refactorer should have methods to accommodate OntologyCollector instead.
+         * TODO the refactorer should have methods to accommodate an OntologyCollector directly instead.
          */
-        OWLOntologyMerger merger = new OWLOntologyMerger(provider);
-        OWLOntology ontology;
-        try {
-            ontology = merger.createMergedOntology(omgr,
-                IRI.create("http://fise.iks-project.eu/dulcifier/integrity-check"));
+        OWLOntology ontology = session.export(OWLOntology.class, true);
+        log.debug("Refactoring recipe IRI is : " + engineConfiguration.getRecipeId());
 
-            log.debug("Refactoring recipe IRI is : " + engineConfiguration.getRecipeId());
-
-            /*
-             * We pass the ontology and the recipe IRI to the Refactor that returns the refactored graph
-             * expressed by using the given vocabulary.
-             */
-            try {
-                /*
-                 * To perform the refactoring of the ontology to a given vocabulary we use the Stanbol
-                 * Refactor.
-                 */
-                Recipe recipe = ruleStore.getRecipe(IRI.create(engineConfiguration.getRecipeId()));
-
-                log.debug("Recipe {} contains {} rules.", recipe, recipe.getkReSRuleList().size());
-                log.debug("The ontology to be refactor is {}", ontology);
+        /*
+         * We pass the ontology and the recipe IRI to the Refactor that returns the refactored graph expressed
+         * by using the given vocabulary.
+         */
+        try {
+            // To perform the refactoring of the ontology to a given vocabulary we use the Stanbol Refactor.
+            Recipe recipe = ruleStore.getRecipe(IRI.create(engineConfiguration.getRecipeId()));
 
-                ontology = refactorer.ontologyRefactoring(ontology,
-                    IRI.create(engineConfiguration.getRecipeId()));
+            log.debug("Recipe {} contains {} rules.", recipe, recipe.getkReSRuleList().size());
+            log.debug("The ontology to be refactored is {}", ontology);
 
-            } catch (RefactoringException e) {
-                log.error("The refactoring engine failed the execution.", e);
-            } catch (NoSuchRecipeException e) {
-                log.error("The recipe with ID " + engineConfiguration.getRecipeId() + " does not exists", e);
-            }
+            ontology = refactorer
+                    .ontologyRefactoring(ontology, IRI.create(engineConfiguration.getRecipeId()));
 
-            log.debug("Merged ontologies in " + ontology);
+        } catch (RefactoringException e) {
+            log.error("The refactoring engine failed the execution.", e);
+        } catch (NoSuchRecipeException e) {
+            log.error("The recipe with ID " + engineConfiguration.getRecipeId() + " does not exist", e);
+        }
 
-            /*
-             * The new generated ontology is converted to Clarezza format and than added os substitued to the
-             * old mGraph.
-             */
-            if (engineConfiguration.isInGraphAppendMode()) {
-                log.debug("Metadata of the content will replace old ones.", this);
-            } else {
-                mGraph.clear();
-                log.debug("Content metadata will be appended to the existing ones.", this);
-            }
-            mGraph.addAll(OWLAPIToClerezzaConverter.owlOntologyToClerezzaTriples(ontology));
+        /*
+         * The newly generated ontology is converted to Clerezza format and then added or substituted to the
+         * old mGraph.
+         */
+        if (engineConfiguration.isInGraphAppendMode()) {
+            log.debug("Content metadata will be appended to the existing ones.", this);
+        } else {
+            mGraph.clear();
+            log.debug("Metadata of the content will replace old ones.", this);
+        }
+        mGraph.addAll(OWLAPIToClerezzaConverter.owlOntologyToClerezzaTriples(ontology));
 
-            /*
-             * The session needs to be destroyed, as it is no more useful.
-             * 
-             * clear contents before destroying (FIXME only do this until this is implemented in the
-             * destroySession() method).
-             */
-            for (IRI iri : session.listManagedOntologies()) {
-                try {
-                    String key = ontologyProvider.getKey(iri);
-                    ontologyProvider.getStore().deleteTripleCollection(new UriRef(key));
-                } catch (Exception ex) {
-                    log.error("Failed to delete triple collection " + iri, ex);
-                    continue;
-                }
+        /*
+         * The session needs to be destroyed, as it is no longer useful.
+         * 
+         * clear contents before destroying (FIXME only do this until this is implemented in the
+         * destroySession() method).
+         */
+        for (IRI iri : session.listManagedOntologies()) {
+            try {
+                String key = ontologyProvider.getKey(iri);
+                ontologyProvider.getStore().deleteTripleCollection(new UriRef(key));
+            } catch (Exception ex) {
+                log.error("Failed to delete triple collection " + iri, ex);
+                continue;
             }
-            sessionManager.destroySession(session.getID());
-
-        } catch (OWLOntologyCreationException e) {
-            throw new EngineException("Cannot create the ontology for the refactoring", e);
         }
+        sessionManager.destroySession(session.getID());
+
     }
 
     @SuppressWarnings("unchecked")