You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@stanbol.apache.org by re...@apache.org on 2016/05/17 22:20:55 UTC

svn commit: r1744328 [10/24] - in /stanbol/trunk: ./ commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/ commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/ commons/installer/bundleprovider/src/main/java/...

Modified: stanbol/trunk/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java (original)
+++ stanbol/trunk/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java Tue May 17 22:20:49 2016
@@ -29,15 +29,13 @@ import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -57,7 +55,7 @@ import org.junit.Test;
 import org.osgi.service.cm.ConfigurationException;
 import org.osgi.service.component.ComponentContext;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 
 public class TextAnnotationNewModelEngineTest {
     
@@ -66,8 +64,8 @@ public class TextAnnotationNewModelEngin
     private static final String TEST_ENHANCEMENTS = "enhancement-results.rdf";
     
     private static final JenaParserProvider rdfParser = new JenaParserProvider();
-    private static MGraph origEnhancements;
-    private static UriRef ciUri;
+    private static Graph origEnhancements;
+    private static IRI ciUri;
     
     private ContentItem contentItem;
     
@@ -80,15 +78,15 @@ public class TextAnnotationNewModelEngin
     public static void init() throws IOException, ConfigurationException {
         InputStream in = TextAnnotationNewModelEngineTest.class.getClassLoader().getResourceAsStream(TEST_ENHANCEMENTS);
         Assert.assertNotNull("Unable to load reaource '"+TEST_ENHANCEMENTS+"' via Classpath",in);
-        origEnhancements = new IndexedMGraph();
+        origEnhancements = new IndexedGraph();
         rdfParser.parse(origEnhancements, in, SupportedFormat.RDF_XML, null);
         Assert.assertFalse(origEnhancements.isEmpty());
         //parse the ID of the ContentItem form the enhancements
         Iterator<Triple> it = origEnhancements.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
         Assert.assertTrue(it.hasNext());
-        Resource id = it.next().getObject();
-        Assert.assertTrue(id instanceof UriRef);
-        ciUri = (UriRef)id;
+        RDFTerm id = it.next().getObject();
+        Assert.assertTrue(id instanceof IRI);
+        ciUri = (IRI)id;
         //validate that the enhancements in the file are valid
         //NOTE: the input data are no longer fully valid to test some features of this engine
         //      because of that this initial test is deactivated
@@ -108,7 +106,7 @@ public class TextAnnotationNewModelEngin
     @Before
     public void initTest() throws IOException {
         contentItem = ciFactory.createContentItem(ciUri, 
-            new StringSource(SINGLE_SENTENCE), new IndexedMGraph(origEnhancements));
+            new StringSource(SINGLE_SENTENCE), new IndexedGraph(origEnhancements));
     }
     
     @Test
@@ -116,15 +114,15 @@ public class TextAnnotationNewModelEngin
         Assert.assertEquals(EnhancementEngine.ENHANCE_ASYNC, engine.canEnhance(contentItem));
         engine.computeEnhancements(contentItem);
         //validate
-        MGraph g = contentItem.getMetadata();
+        Graph g = contentItem.getMetadata();
         Iterator<Triple> it = g.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
         Assert.assertTrue(it.hasNext());
         while(it.hasNext()){
-            NonLiteral ta = it.next().getSubject();
-            Assert.assertTrue(ta instanceof UriRef);
-            Map<UriRef,Resource> expected = new HashMap<UriRef,Resource>();
+            BlankNodeOrIRI ta = it.next().getSubject();
+            Assert.assertTrue(ta instanceof IRI);
+            Map<IRI,RDFTerm> expected = new HashMap<IRI,RDFTerm>();
             expected.put(Properties.ENHANCER_EXTRACTED_FROM, contentItem.getUri());
-            EnhancementStructureHelper.validateTextAnnotation(g, (UriRef)ta, SINGLE_SENTENCE, expected,true);
+            EnhancementStructureHelper.validateTextAnnotation(g, (IRI)ta, SINGLE_SENTENCE, expected,true);
         }
         
     }

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java Tue May 17 22:20:49 2016
@@ -43,10 +43,10 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
@@ -298,10 +298,10 @@ public class TikaEngine
                 }
             }
             String random = randomUUID().toString();
-            UriRef textBlobUri = new UriRef("urn:tika:text:"+random);
+            IRI textBlobUri = new IRI("urn:tika:text:"+random);
             ci.addPart(textBlobUri, plainTextSink.getBlob());
             if(xhtmlHandler != null){
-                UriRef xhtmlBlobUri = new UriRef("urn:tika:xhtml:"+random);
+                IRI xhtmlBlobUri = new IRI("urn:tika:xhtml:"+random);
                 ci.addPart(xhtmlBlobUri,  xhtmlSink.getBlob());
             }
             //add the extracted metadata
@@ -312,15 +312,15 @@ public class TikaEngine
             }
             ci.getLock().writeLock().lock();
             try {
-                MGraph graph = ci.getMetadata();
-                UriRef id = ci.getUri();
+                Graph graph = ci.getMetadata();
+                IRI id = ci.getUri();
                 Set<String> mapped = ontologyMappings.apply(graph, id, metadata);
                 if(includeUnmappedProperties){
                     Set<String> unmapped = new HashSet<String>(Arrays.asList(metadata.names()));
                     unmapped.removeAll(mapped);
                     for(String name : unmapped){
                         if(name.indexOf(':') >=0 || includeAllUnmappedProperties){ //only mapped
-                            UriRef prop = new UriRef(new StringBuilder(TIKA_URN_PREFIX).append(name).toString());
+                            IRI prop = new IRI(new StringBuilder(TIKA_URN_PREFIX).append(name).toString());
                             for(String value : metadata.getValues(name)){
                                 //TODO: without the Property for the name we have no datatype
                                 //      information ... so we add PlainLiterals for now

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java Tue May 17 22:20:49 2016
@@ -21,19 +21,19 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public class ConstantMapping extends Mapping{
 
     
-    private Collection<Resource> values;
+    private Collection<RDFTerm> values;
 
-    public ConstantMapping(UriRef ontProperty, Resource...values) {
+    public ConstantMapping(IRI ontProperty, RDFTerm...values) {
         super(ontProperty, null);
         if(values == null || values.length < 1){
             throw new IllegalArgumentException("The parsed values MUST NOT be NULL nor an empty array");
@@ -46,8 +46,8 @@ public class ConstantMapping extends Map
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
-        for(Resource value : values){
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
+        for(RDFTerm value : values){
             graph.add(new TripleImpl(subject, ontProperty, value));
             mappingLogger.log(subject, ontProperty, null, value);
         }

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java Tue May 17 22:20:49 2016
@@ -35,19 +35,17 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
+import org.apache.clerezza.commons.rdf.BlankNode;
 import org.apache.clerezza.rdf.core.InvalidLiteralTypeException;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.clerezza.rdf.core.impl.TypedLiteralImpl;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.ontologies.XSD;
 import org.apache.tika.metadata.DublinCore;
@@ -58,7 +56,7 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Used as value for Apache Tika {@link Metadata} mappings. Holds the
- * ontology property as {@link UriRef} and optionally a Tika {@link Property}.
+ * ontology property as {@link IRI} and optionally a Tika {@link Property}.
  * Later can be used to parse the correct datatype for values contained in the
  * {@link Metadata}
  * 
@@ -74,21 +72,21 @@ public abstract class Mapping {
      * List with allowed DataTypes.<ul>
      * <li> <code>null</code> is used for {@link PlainLiteral}s
      * <li> {@link XSD} datatyoes are used for {@link TypedLiteral}s
-     * <li> {@link RDFS#Resource} is used for {@link NonLiteral} values. Note
-     * that only {@link UriRef} is supported, because for Tika {@link BNode}s
+     * <li> {@link RDFS#Resource} is used for {@link BlankNodeOrIRI} values. Note
+     * that only {@link IRI} is supported, because for Tika {@link BlankNode}s
      * do not make sense.
      * </ul>
      */
-    public static final Set<UriRef> ONT_TYPES;
+    public static final Set<IRI> ONT_TYPES;
     /**
      * Map with the same keys as contained in {@link #ONT_TYPES}. The values
      * are the java types.
      */
-    protected static final Map<UriRef,Class<?>> ONT_TYPE_MAP;
+    protected static final Map<IRI,Class<?>> ONT_TYPE_MAP;
     
     static {
         //use a linked HasSetMap to have the nice ordering (mainly for logging)
-        Map<UriRef,Class<?>> map = new LinkedHashMap<UriRef,Class<?>>();
+        Map<IRI,Class<?>> map = new LinkedHashMap<IRI,Class<?>>();
         //Plain Literal values
         map.put(null,null);
         //Typed Literal values
@@ -107,7 +105,7 @@ public abstract class Mapping {
         map.put(XSD.short_,Short.class);
         map.put(XSD.string,String.class);
         map.put(XSD.time,Date.class);
-        //Data Types for NonLiteral values
+        //Data Types for BlankNodeOrIRI values
         map.put(RDFS.Resource,URI.class);
         ONT_TYPE_MAP = Collections.unmodifiableMap(map);
         ONT_TYPES = ONT_TYPE_MAP.keySet();
@@ -119,14 +117,14 @@ public abstract class Mapping {
         //XSD.token,XSD.unsignedByte,XSD.unsignedInt,XSD.unsignedLong,XSD.unsignedShort,
     }
     
-    protected final UriRef ontProperty;
+    protected final IRI ontProperty;
     
     protected final Converter converter;
     /**
      * Getter for the OntologyProperty for this mapping
      * @return the ontProperty
      */
-    public final UriRef getOntologyProperty() {
+    public final IRI getOntologyProperty() {
         return ontProperty;
     }
     /**
@@ -141,12 +139,12 @@ public abstract class Mapping {
      */
     public abstract Set<String> getMappedTikaProperties();
     
-    protected final UriRef ontType;
+    protected final IRI ontType;
     
-    protected Mapping(UriRef ontProperty,UriRef ontType){
+    protected Mapping(IRI ontProperty,IRI ontType){
         this(ontProperty,ontType,null);
     }
-    protected Mapping(UriRef ontProperty,UriRef ontType,Converter converter){
+    protected Mapping(IRI ontProperty,IRI ontType,Converter converter){
         if(ontProperty == null){
             throw new IllegalArgumentException("The parsed ontology property MUST NOT be NULL!");
         }
@@ -161,34 +159,34 @@ public abstract class Mapping {
     
     /**
      * Applies this mapping based on the parsed {@link Metadata} and stores the 
-     * results to {@link MGraph}
-     * @param graph the Graph to store the mapping results
+     * results to {@link Graph}
+     * @param graph the Graph to store the mapping results
      * @param subject the subject (context) to add the mappings
      * @param metadata the metadata used for applying the mapping
      * @return <code>true</code> if the mapping could be applied based on the
      * parsed data. Otherwise <code>false</code>. This is intended to be used
      * by components that need to check if required mappings could be applied.
      */
-    public abstract boolean apply(MGraph graph, NonLiteral subject, Metadata metadata);
+    public abstract boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata);
     /**
      * Converts the parsed value based on the mapping information to an RDF
-     * {@link Resource}. Optionally supports also validation if the parsed
+     * {@link RDFTerm}. Optionally supports also validation if the parsed
      * value is valid for the {@link Mapping#ontType ontology type} specified by
      * the parsed mapping.
      * @param value the value
      * @param mapping the mapping
      * @param validate 
-     * @return the {@link Resource} or <code>null</code> if the parsed value is
+     * @return the {@link RDFTerm} or <code>null</code> if the parsed value is
      * <code>null</code> or {@link String#isEmpty() empty}.
      * @throws IllegalArgumentException if the parsed {@link Mapping} is 
      * <code>null</code>
      */
-    protected Resource toResource(String value, boolean validate){
+    protected RDFTerm toResource(String value, boolean validate){
         Metadata dummy = null;//used for date validation
         if(value == null || value.isEmpty()){
             return null; //ignore null and empty values
         }
-        Resource object;
+        RDFTerm object;
         if(ontType == null){
             object = new PlainLiteralImpl(value);
         } else if(ontType == RDFS.Resource){
@@ -196,7 +194,7 @@ public abstract class Mapping {
                 if(validate){
                     new URI(value);
                 }
-                object = new UriRef(value);
+                object = new IRI(value);
             } catch (URISyntaxException e) {
                 log.warn("Unable to create Reference for value {} (not a valid URI)" +
                         " -> create a literal instead",value);
@@ -232,7 +230,7 @@ public abstract class Mapping {
             if(validate && clazz != null && 
                     !clazz.equals(Date.class)){ //we need not to validate dates
                 try {
-                    lf.createObject(clazz,(TypedLiteral)object);
+                    lf.createObject(clazz,(Literal)object);
                 } catch (NoConvertorException e) {
                     log.info("Unable to validate typed literals of type {} because" +
                             "there is no converter for Class {} registered with Clerezza",
@@ -261,8 +259,8 @@ public abstract class Mapping {
      */
     protected static final class MappingLogger{
         
-        private List<NonLiteral> subjects = new ArrayList<NonLiteral>();
-        private UriRef predicate;
+        private List<BlankNodeOrIRI> subjects = new ArrayList<BlankNodeOrIRI>();
+        private IRI predicate;
         private final int intendSize = 2;
         private final char[] intnedArray;
         private static final int MAX_INTEND = 5;
@@ -276,7 +274,7 @@ public abstract class Mapping {
                 Math.min(MAX_INTEND, intend)*intendSize);
         }
         
-        protected void log(NonLiteral subject,UriRef predicate, String prop, Resource object){
+        protected void log(BlankNodeOrIRI subject,IRI predicate, String prop, RDFTerm object){
             if(!log.isDebugEnabled()){
                 return;
             }
@@ -305,6 +303,6 @@ public abstract class Mapping {
     }
     
     public static interface Converter {
-        Resource convert(Resource value);
+        RDFTerm convert(RDFTerm value);
     }
 }

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java Tue May 17 22:20:49 2016
@@ -29,10 +29,10 @@ import java.util.Set;
 import java.util.TreeSet;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.ontologies.SKOS;
@@ -63,7 +63,7 @@ public class OntologyMappings implements
     
     private static OntologyMappings defaultMappings;
     
-    private final Map<UriRef,Collection<Mapping>> mappings = new HashMap<UriRef,Collection<Mapping>>();
+    private final Map<IRI,Collection<Mapping>> mappings = new HashMap<IRI,Collection<Mapping>>();
     /**
      * Used to protect the default mappings from modifications
      */
@@ -334,11 +334,11 @@ public class OntologyMappings implements
             new PropertyMapping(ma+"averageBitRate",XSD.double_,
                 new Mapping.Converter(){//we need to convert from MByte/min to kByte/sec
                     @Override
-                    public Resource convert(Resource value) {
-                        if(value instanceof TypedLiteral &&
-                                XSD.double_.equals(((TypedLiteral)value).getDataType())){
+                    public RDFTerm convert(RDFTerm value) {
+                        if(value instanceof Literal &&
+                                XSD.double_.equals(((Literal)value).getDataType())){
                             LiteralFactory lf = LiteralFactory.getInstance();
-                            double mm = lf.createObject(Double.class, (TypedLiteral)value);
+                            double mm = lf.createObject(Double.class, (Literal)value);
                             return lf.createTypedLiteral(Double.valueOf(
                                 mm*1024/60));
                         } else {
@@ -348,7 +348,7 @@ public class OntologyMappings implements
                 
             },XMPDM.FILE_DATA_RATE.getName()));
 
-        //GEO -> Media Resource Ontology
+        //GEO -> Media Resource Ontology
         mappings.addMapping(new ResourceMapping(ma+"hasLocation", 
             new Mapping[]{ //required
                 new PropertyMapping(ma+"locationLatitude", XSD.double_,Geographic.LATITUDE.getName()),
@@ -466,7 +466,7 @@ public class OntologyMappings implements
         }
         propMappings.add(mapping);
     }
-    public void removePropertyMappings(UriRef property){
+    public void removePropertyMappings(IRI property){
         if(readonly){
             throw new IllegalStateException("This "+getClass().getSimpleName()+" instance is read only!");
         }
@@ -475,13 +475,13 @@ public class OntologyMappings implements
     
     /**
      * Applies the registered Ontology Mappings to the parsed metadata and
-     * context. Mappings are added to the parsed Graph
+     * context. Mappings are added to the parsed Graph
      * @param graph
      * @param context
      * @param metadata
      * @return Set containing the names of mapped keys
      */
-    public Set<String> apply(MGraph graph, UriRef context, Metadata metadata){
+    public Set<String> apply(Graph graph, IRI context, Metadata metadata){
         Set<String> keys = new HashSet<String>(Arrays.asList(metadata.names()));
         Set<String> mappedKeys = new HashSet<String>();
         for(Mapping mapping : this){

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java Tue May 17 22:20:49 2016
@@ -23,11 +23,11 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public final class PropertyMapping extends Mapping {
@@ -37,24 +37,24 @@ public final class PropertyMapping exten
      */
     protected final Set<String> tikaProperties;
 
-    public PropertyMapping(String ontProperty, UriRef ontType,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty), ontType,tikaProperties);
+    public PropertyMapping(String ontProperty, IRI ontType,String...tikaProperties) {
+        this(ontProperty == null? null : new IRI(ontProperty), ontType,tikaProperties);
     }
-    public PropertyMapping(String ontProperty, UriRef ontType,Converter converter,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty), ontType,converter,tikaProperties);
+    public PropertyMapping(String ontProperty, IRI ontType,Converter converter,String...tikaProperties) {
+        this(ontProperty == null? null : new IRI(ontProperty), ontType,converter,tikaProperties);
     }
 
     public PropertyMapping(String ontProperty,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty),null,tikaProperties);
+        this(ontProperty == null? null : new IRI(ontProperty),null,tikaProperties);
     }
 
-    public PropertyMapping(UriRef ontProperty,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty,String...tikaProperties) {
         this(ontProperty,null,tikaProperties);
     }
-    public PropertyMapping(UriRef ontProperty, UriRef ontType,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty, IRI ontType,String...tikaProperties) {
         this(ontProperty,ontType,null,tikaProperties);
     }
-    public PropertyMapping(UriRef ontProperty, UriRef ontType,Converter converter,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty, IRI ontType,Converter converter,String...tikaProperties) {
         super(ontProperty, ontType,converter);
         if(tikaProperties == null || tikaProperties.length < 1){
             throw new IllegalArgumentException("The list of parsed Tika properties MUST NOT be NULL nor empty!");
@@ -68,13 +68,13 @@ public final class PropertyMapping exten
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
-        Set<Resource> values = new HashSet<Resource>();
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
+        Set<RDFTerm> values = new HashSet<RDFTerm>();
         for(String tikaProperty : tikaProperties){
             String[] tikaPropValues = metadata.getValues(tikaProperty);
             if(tikaPropValues != null && tikaPropValues.length > 0){
                 for(String tikaPropValue : tikaPropValues){
-                    Resource resource = toResource(tikaPropValue, true);
+                    RDFTerm resource = toResource(tikaPropValue, true);
                     if(resource != null){
                         values.add(resource);
                         mappingLogger.log(subject, ontProperty, tikaProperty, resource);
@@ -87,7 +87,7 @@ public final class PropertyMapping exten
         if(values.isEmpty()){
             return false;
         } else {
-            for(Resource resource : values){
+            for(RDFTerm resource : values){
                 graph.add(new TripleImpl(subject, ontProperty, resource));
             }
             return true;

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java Tue May 17 22:20:49 2016
@@ -23,12 +23,12 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public final class ResourceMapping extends Mapping{
@@ -41,16 +41,16 @@ public final class ResourceMapping exten
     Set<String> mappedTikaProperties;
     
     public ResourceMapping(String ontProperty, Mapping...required) {
-        this(new UriRef(ontProperty), required);
+        this(new IRI(ontProperty), required);
     }
     public ResourceMapping(String ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
-        this(new UriRef(ontProperty), required,optional,additional);
+        this(new IRI(ontProperty), required,optional,additional);
     }
 
-    public ResourceMapping(UriRef ontProperty, Mapping...requried) {
+    public ResourceMapping(IRI ontProperty, Mapping...requried) {
         this(ontProperty,requried,null,null);
     }
-    public ResourceMapping(UriRef ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
+    public ResourceMapping(IRI ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
         super(ontProperty,null);
         required = required == null ? EMPTY : required;
         optional = optional == null ? EMPTY : optional;
@@ -91,12 +91,12 @@ public final class ResourceMapping exten
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
         boolean added = false;
-        NonLiteral s = new BNode();
+        BlankNodeOrIRI s = new BlankNode();
         mappingLogger.log(subject, ontProperty, null, s);
         if(!required.isEmpty()) {
-            MGraph g = new SimpleMGraph();
+            Graph g = new SimpleGraph();
             for(Mapping m : required){
                 if(!m.apply(g, s, metadata)){
                     return false;

Modified: stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java Tue May 17 22:20:49 2016
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.engines.tika.metadata;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.ontologies.RDF;
 
 /**
@@ -27,9 +27,9 @@ import org.apache.clerezza.rdf.ontologie
 public class TypeMapping extends ConstantMapping {
 
     public TypeMapping(String type) {
-        this(new UriRef(type));
+        this(new IRI(type));
     }
-    public TypeMapping(UriRef...types) {
+    public TypeMapping(IRI...types) {
         super(RDF.type, types);
     }
 

Modified: stanbol/trunk/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java (original)
+++ stanbol/trunk/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java Tue May 17 22:20:49 2016
@@ -45,15 +45,12 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.XSD;
 import org.apache.commons.io.IOUtils;
@@ -112,7 +109,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.html", "text/html; charset=UTF-8");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -142,7 +139,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.pdf", "application/pdf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -199,7 +196,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.doc", "application/msword");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -227,7 +224,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.rtf", "application/rtf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -256,7 +253,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.odt", "application/vnd.oasis.opendocument.text");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -285,7 +282,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.email.txt", "message/rfc822");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -314,27 +311,27 @@ public class TikaEngineTest {
         //no check the extracted metadata!
         //DC
         //STANBOL-757: dc:date no longer added by Tika 1.2 (dc:created is still present)
-        //verifyValue(ci, new UriRef(NamespaceEnum.dc+"date"), XSD.dateTime,"2010-09-06T09:25:34Z");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"), null,"message/rfc822");
+        //verifyValue(ci, new IRI(NamespaceEnum.dc+"date"), XSD.dateTime,"2010-09-06T09:25:34Z");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"), null,"message/rfc822");
         //STANBOL-757: dc:subject no longer added by Tika1.2 (dc:title is used instead)
-        //verifyValue(ci, new UriRef(NamespaceEnum.dc+"subject"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"title"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"creator"), null,"Julien Nioche (JIRA) <ji...@apache.org>");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"created"), XSD.dateTime,"2010-09-06T09:25:34Z");
+        //verifyValue(ci, new IRI(NamespaceEnum.dc+"subject"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"title"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"creator"), null,"Julien Nioche (JIRA) <ji...@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"created"), XSD.dateTime,"2010-09-06T09:25:34Z");
         
         //Media Ontology
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2010-09-06T09:25:34Z");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"message/rfc822");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasCreator"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasContributor"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2010-09-06T09:25:34Z");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"message/rfc822");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasCreator"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasContributor"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
         //STANBOL-757: This was present with Tika 1.1 because its mapping from dc:subject 
-//        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasKeyword"),null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+//        verifyValue(ci, new IRI(NamespaceEnum.media+"hasKeyword"),null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
 
         
         //Nepomuk Message
         String message = "http://www.semanticdesktop.org/ontologies/2007/03/22/nmo#";
-        verifyValue(ci, new UriRef(message+"from"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
-        verifyValue(ci, new UriRef(message+"to"),null,"dev@tika.apache.org");
+        verifyValue(ci, new IRI(message+"from"),null,"Julien Nioche (JIRA) <ji...@apache.org>");
+        verifyValue(ci, new IRI(message+"to"),null,"dev@tika.apache.org");
         
     }
     @Test
@@ -343,7 +340,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("testMP3id3v24.mp3", "audio/mpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -359,16 +356,16 @@ public class TikaEngineTest {
         Blob xhtmlBlob = contentPart.getValue();
         assertNotNull(xhtmlBlob);
         //Test AudioTrack metadata
-        NonLiteral audioTrack = verifyNonLiteral(ci, new UriRef(NamespaceEnum.media+"hasTrack"));
+        BlankNodeOrIRI audioTrack = verifyBlankNodeOrIRI(ci, new IRI(NamespaceEnum.media+"hasTrack"));
         //types
         verifyValues(ci, audioTrack, RDF.type, 
-            new UriRef(NamespaceEnum.media+"MediaFragment"),
-            new UriRef(NamespaceEnum.media+"Track"),
-            new UriRef(NamespaceEnum.media+"AudioTrack"));
+            new IRI(NamespaceEnum.media+"MediaFragment"),
+            new IRI(NamespaceEnum.media+"Track"),
+            new IRI(NamespaceEnum.media+"AudioTrack"));
         //properties
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasFormat"), XSD.string, "Mono");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasCompression"), XSD.string, "MP3");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasFormat"), XSD.string, "Mono");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasCompression"), XSD.string, "MP3");
     }
     /**
      * Tests mappings for the Mp4 metadata extraction capabilities added to
@@ -383,7 +380,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("testMP4.m4a", "audio/mp4");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -399,42 +396,42 @@ public class TikaEngineTest {
         Blob xhtmlBlob = contentPart.getValue();
         assertNotNull(xhtmlBlob);
         //Test AudioTrack metadata
-        NonLiteral audioTrack = verifyNonLiteral(ci, new UriRef(NamespaceEnum.media+"hasTrack"));
+        BlankNodeOrIRI audioTrack = verifyBlankNodeOrIRI(ci, new IRI(NamespaceEnum.media+"hasTrack"));
         //types
         verifyValues(ci, audioTrack, RDF.type, 
-            new UriRef(NamespaceEnum.media+"MediaFragment"),
-            new UriRef(NamespaceEnum.media+"Track"),
-            new UriRef(NamespaceEnum.media+"AudioTrack"));
+            new IRI(NamespaceEnum.media+"MediaFragment"),
+            new IRI(NamespaceEnum.media+"Track"),
+            new IRI(NamespaceEnum.media+"AudioTrack"));
         //properties
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasFormat"), XSD.string, "Stereo");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasCompression"), XSD.string, "M4A");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasFormat"), XSD.string, "Stereo");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasCompression"), XSD.string, "M4A");
     }
     @Test
     public void testGEOMetadata() throws EngineException, IOException, ParseException{
         log.info(">>> testGEOMetadata <<<");
-        //first validate Media Resource Ontology
-        UriRef hasLocation = new UriRef(NamespaceEnum.media+"hasLocation");
-        UriRef locationLatitude = new UriRef(NamespaceEnum.media+"locationLatitude");
-        UriRef locationLongitude = new UriRef(NamespaceEnum.media+"locationLongitude");
-        //UriRef locationAltitude = new UriRef(NamespaceEnum.media+"locationAltitude");
+        //first validate Media Resource Ontology (W3C "Ontology for Media Resources"; comment wording was clobbered by the mechanical Resource->RDFTerm rename)
+        IRI hasLocation = new IRI(NamespaceEnum.media+"hasLocation");
+        IRI locationLatitude = new IRI(NamespaceEnum.media+"locationLatitude");
+        IRI locationLongitude = new IRI(NamespaceEnum.media+"locationLongitude");
+        //IRI locationAltitude = new IRI(NamespaceEnum.media+"locationAltitude");
         ContentItem ci = createContentItem("testJPEG_GEO.jpg", OCTET_STREAM.toString());//"video/x-ms-asf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
         Iterator<Triple> it = ci.getMetadata().filter(ci.getUri(),hasLocation, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof NonLiteral);
-        NonLiteral location = verifyNonLiteral(ci, hasLocation);
+        assertTrue(r instanceof BlankNodeOrIRI);
+        BlankNodeOrIRI location = verifyBlankNodeOrIRI(ci, hasLocation);
         //lat
         verifyValue(ci, location, locationLatitude, XSD.double_, "12.54321");
         //long
         verifyValue(ci, location, locationLongitude, XSD.double_, "-54.1234");
         
         //second the GEO ont
-        UriRef lat = new UriRef(NamespaceEnum.geo+"lat");
-        UriRef lon = new UriRef(NamespaceEnum.geo+"long");
+        IRI lat = new IRI(NamespaceEnum.geo+"lat");
+        IRI lon = new IRI(NamespaceEnum.geo+"long");
         //lat
         verifyValue(ci, lat, XSD.double_, "12.54321");
         //long
@@ -448,15 +445,15 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("testMP3id3v24.mp3", "audio/mpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        verifyValue(ci,new UriRef(NamespaceEnum.dc+"creator"),null,"Test Artist");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"title"),null,"Test Album");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"),null,"audio/mpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"audio/mpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"mainOriginalTitle"),null,"Test Album");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasContributor"),null,"Test Artist");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"releaseDate"),XSD.string,"2008");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasGenre"),null,"Rock");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasCreator"),null,"Test Artist");
+        verifyValue(ci,new IRI(NamespaceEnum.dc+"creator"),null,"Test Artist");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"title"),null,"Test Album");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"),null,"audio/mpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"audio/mpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"mainOriginalTitle"),null,"Test Album");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasContributor"),null,"Test Artist");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"releaseDate"),XSD.string,"2008");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasGenre"),null,"Rock");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasCreator"),null,"Test Artist");
     }
     @Test
     public void testExifMetadata() throws EngineException, ParseException, IOException {
@@ -465,32 +462,32 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("testJPEG_EXIF.jpg", "image/jpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        verifyValue(ci, new UriRef(exif+"make"),null,"Canon");
-        verifyValue(ci, new UriRef(exif+"software"),null,"Adobe Photoshop CS3 Macintosh");
-        verifyValue(ci, new UriRef(exif+"dateTimeOriginal"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValue(ci, new UriRef(exif+"relatedImageWidth"),XSD.int_,"100");
-        verifyValue(ci, new UriRef(exif+"fNumber"),XSD.double_,"5.6");
-        verifyValue(ci, new UriRef(exif+"model"),null,"Canon EOS 40D");
-        verifyValue(ci, new UriRef(exif+"isoSpeedRatings"),XSD.int_,"400");
-        verifyValue(ci, new UriRef(exif+"xResolution"),XSD.double_,"240.0");
-        verifyValue(ci, new UriRef(exif+"flash"),XSD.boolean_,"false");
-        verifyValue(ci, new UriRef(exif+"exposureTime"),XSD.double_,"6.25E-4");
-        verifyValue(ci, new UriRef(exif+"yResolution"),XSD.double_,"240.0");
-        verifyValue(ci, new UriRef(exif+"resolutionUnit"),XSD.string,"Inch");
-        verifyValue(ci, new UriRef(exif+"focalLength"),XSD.double_,"194.0");
-        verifyValue(ci, new UriRef(exif+"relatedImageLength"),XSD.int_,"68");
-        verifyValue(ci, new UriRef(exif+"bitsPerSample"),XSD.int_,"8");
+        verifyValue(ci, new IRI(exif+"make"),null,"Canon");
+        verifyValue(ci, new IRI(exif+"software"),null,"Adobe Photoshop CS3 Macintosh");
+        verifyValue(ci, new IRI(exif+"dateTimeOriginal"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValue(ci, new IRI(exif+"relatedImageWidth"),XSD.int_,"100");
+        verifyValue(ci, new IRI(exif+"fNumber"),XSD.double_,"5.6");
+        verifyValue(ci, new IRI(exif+"model"),null,"Canon EOS 40D");
+        verifyValue(ci, new IRI(exif+"isoSpeedRatings"),XSD.int_,"400");
+        verifyValue(ci, new IRI(exif+"xResolution"),XSD.double_,"240.0");
+        verifyValue(ci, new IRI(exif+"flash"),XSD.boolean_,"false");
+        verifyValue(ci, new IRI(exif+"exposureTime"),XSD.double_,"6.25E-4");
+        verifyValue(ci, new IRI(exif+"yResolution"),XSD.double_,"240.0");
+        verifyValue(ci, new IRI(exif+"resolutionUnit"),XSD.string,"Inch");
+        verifyValue(ci, new IRI(exif+"focalLength"),XSD.double_,"194.0");
+        verifyValue(ci, new IRI(exif+"relatedImageLength"),XSD.int_,"68");
+        verifyValue(ci, new IRI(exif+"bitsPerSample"),XSD.int_,"8");
         //also Media Ontology mappings for Exif
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"frameHeight"),XSD.int_,"68");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"frameWidth"),XSD.int_,"100");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"image/jpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValues(ci, new UriRef(NamespaceEnum.media+"hasKeyword"),null,"serbor","moscow-birds","canon-55-250");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"frameHeight"),XSD.int_,"68");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"frameWidth"),XSD.int_,"100");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"image/jpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValues(ci, new IRI(NamespaceEnum.media+"hasKeyword"),null,"serbor","moscow-birds","canon-55-250");
         //and finally the mapped DC properties
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"),null,"image/jpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"created"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"modified"),XSD.dateTime,"2009-10-02T23:02:49");
-        verifyValues(ci, new UriRef(NamespaceEnum.dc+"subject"), null, "serbor","moscow-birds","canon-55-250");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"),null,"image/jpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"created"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"modified"),XSD.dateTime,"2009-10-02T23:02:49");
+        verifyValues(ci, new IRI(NamespaceEnum.dc+"subject"), null, "serbor","moscow-birds","canon-55-250");
     }
     
     /**
@@ -508,7 +505,7 @@ public class TikaEngineTest {
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
         //test that the "xmpDM:logComment" is present
-        verifyValue(ci, new UriRef("urn:tika.apache.org:tika:xmpDM:logComment"), null,"Test Comments");
+        verifyValue(ci, new IRI("urn:tika.apache.org:tika:xmpDM:logComment"), null,"Test Comments");
     }
     
     @Test
@@ -517,7 +514,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.pdf", OCTET_STREAM.toString());
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -557,7 +554,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.pages", "application/x-iwork-pages-sffpages");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         //it MUST NOT give an error but also not add a content part
         assertNull(contentPart);
@@ -570,7 +567,7 @@ public class TikaEngineTest {
         ContentItem ci = createContentItem("test.xhtml", XHTML.toString()+"; charset=UTF-8");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -631,84 +628,81 @@ public class TikaEngineTest {
     /*
      * Internal helper methods 
      */
-    private NonLiteral verifyNonLiteral(ContentItem ci, UriRef property){
-        return verifyNonLiteral(ci, ci.getUri(), property);
+    private BlankNodeOrIRI verifyBlankNodeOrIRI(ContentItem ci, IRI property){
+        return verifyBlankNodeOrIRI(ci, ci.getUri(), property);
     }
-    private static NonLiteral verifyNonLiteral(ContentItem ci, UriRef subject, UriRef property){
+    private static BlankNodeOrIRI verifyBlankNodeOrIRI(ContentItem ci, IRI subject, IRI property){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof NonLiteral);
-        return (NonLiteral)r;
+        assertTrue(r instanceof BlankNodeOrIRI);
+        return (BlankNodeOrIRI)r;
     }
-    private static UriRef verifyValue(ContentItem ci, UriRef property, UriRef value){
+    private static IRI verifyValue(ContentItem ci, IRI property, IRI value){
         return verifyValue(ci, ci.getUri(), property, value);
     }
-    private static UriRef verifyValue(ContentItem ci, NonLiteral subject, UriRef property, UriRef value){
+    private static IRI verifyValue(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI value){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof UriRef);
+        assertTrue(r instanceof IRI);
         assertEquals(value,r);
-        return (UriRef)r;
+        return (IRI)r;
    }
-    private static Literal verifyValue(ContentItem ci, UriRef property, UriRef dataType, String lexValue) throws ParseException{
+    private static Literal verifyValue(ContentItem ci, IRI property, IRI dataType, String lexValue) throws ParseException{
         return verifyValue(ci, ci.getUri(), property, dataType, lexValue);
     }
-    private static Literal verifyValue(ContentItem ci, NonLiteral subject, UriRef property, UriRef dataType, String lexValue) throws ParseException{
+    private static Literal verifyValue(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI dataType, String lexValue) throws ParseException{
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        if(dataType == null){
-            assertTrue(r instanceof PlainLiteral);
-        } else {
-            assertTrue(r instanceof TypedLiteral);
-            assertEquals(dataType, ((TypedLiteral)r).getDataType());
+        if(dataType != null){
+            assertEquals(dataType, ((Literal)r).getDataType());
         }
         //if we check dates and the lexical value is not UTC than we need to
         //consider the time zone of the host running this test
         if(XSD.dateTime.equals(dataType) && lexValue.charAt(lexValue.length()-1) != 'Z'){
             Date expectedDate = dateDefaultTimezone.parse(lexValue);
-            assertEquals(expectedDate, lf.createObject(Date.class, ((TypedLiteral)r)));
+            assertEquals(expectedDate, lf.createObject(Date.class, ((Literal)r)));
         } else {
             assertEquals(lexValue,((Literal)r).getLexicalForm());
         }
         return (Literal)r;
     }
-    private static Set<Literal> verifyValues(ContentItem ci, UriRef property, UriRef dataType, String...lexValues){
+    private static Set<Literal> verifyValues(ContentItem ci, IRI property, IRI dataType, String...lexValues){
         return verifyValues(ci, ci.getUri(), property, dataType, lexValues);
     }
-    private static Set<Literal> verifyValues(ContentItem ci, NonLiteral subject, UriRef property, UriRef dataType, String...lexValues){
+    private static Set<Literal> verifyValues(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI dataType, String...lexValues){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
         Set<String> expected = new HashSet<String>(Arrays.asList(lexValues));
         Set<Literal> found = new HashSet<Literal>(expected.size());
         while(it.hasNext()){
-            Resource r = it.next().getObject();
+            RDFTerm r = it.next().getObject();
             if(dataType == null){
-                assertTrue(r instanceof PlainLiteral);
+                assertTrue(r instanceof Literal);
             } else {
-                assertTrue(r instanceof TypedLiteral);
-                assertEquals(dataType, ((TypedLiteral)r).getDataType());
+                assertTrue(r instanceof Literal);
+                assertEquals(dataType, ((Literal)r).getDataType());
             }
             assertTrue(expected.remove(((Literal)r).getLexicalForm()));
             found.add((Literal)r);
         }
         return found;
     }
-    private static Set<NonLiteral> verifyValues(ContentItem ci, NonLiteral subject, UriRef property, NonLiteral...references){
+    private static Set<BlankNodeOrIRI> verifyValues(ContentItem ci, BlankNodeOrIRI subject, IRI property, BlankNodeOrIRI...references){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Set<NonLiteral> expected = new HashSet<NonLiteral>(Arrays.asList(references));
-        Set<NonLiteral> found = new HashSet<NonLiteral>(expected.size());
+        Set<BlankNodeOrIRI> expected = new HashSet<BlankNodeOrIRI>(Arrays.asList(references));
+        Set<BlankNodeOrIRI> found = new HashSet<BlankNodeOrIRI>(expected.size());
         while(it.hasNext()){
-            Resource r = it.next().getObject();
-            assertTrue(r instanceof NonLiteral);
+            RDFTerm r = it.next().getObject();
+            assertTrue(r instanceof BlankNodeOrIRI);
             assertTrue(expected.remove(r));
-            found.add((NonLiteral)r);
+            found.add((BlankNodeOrIRI)r);
         }
         return found;
     }

Modified: stanbol/trunk/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java (original)
+++ stanbol/trunk/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java Tue May 17 22:20:49 2016
@@ -16,8 +16,8 @@
  */
 package org.apache.stanbol.enhancer.topic.api;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.topic.api.training.TrainingSet;
 import org.apache.stanbol.enhancer.topic.api.training.TrainingSetException;
 import org.osgi.framework.InvalidSyntaxException;
@@ -180,5 +180,5 @@ public interface TopicClassifier {
      * 
      * @return the number of concepts successfully imported (including roots).
      */
-    int importConceptsFromGraph(Graph graph, UriRef conceptClass, UriRef broaderProperty) throws ClassifierException;
+    int importConceptsFromGraph(ImmutableGraph graph, IRI conceptClass, IRI broaderProperty) throws ClassifierException;
 }

Modified: stanbol/trunk/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java (original)
+++ stanbol/trunk/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java Tue May 17 22:20:49 2016
@@ -35,14 +35,14 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -479,7 +479,7 @@ public class TopicClassificationEngine e
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException(
                     "No ContentPart with a supported Mime Type" + "found for ContentItem " + ci.getUri()
@@ -507,7 +507,7 @@ public class TopicClassificationEngine e
                 contentPart.getKey(), ci.getUri());
             return;
         }
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         List<TopicSuggestion> topics;
         try {
             topics = suggestTopics(text);
@@ -517,20 +517,20 @@ public class TopicClassificationEngine e
         } catch (ClassifierException e) {
             throw new EngineException(e);
         }
-        UriRef precision = new UriRef(NamespaceEnum.fise + "classifier/precision");
-        UriRef recall = new UriRef(NamespaceEnum.fise + "classifier/recall");
-        UriRef f1 = new UriRef(NamespaceEnum.fise + "classifier/f1");
+        IRI precision = new IRI(NamespaceEnum.fise + "classifier/precision");
+        IRI recall = new IRI(NamespaceEnum.fise + "classifier/recall");
+        IRI f1 = new IRI(NamespaceEnum.fise + "classifier/f1");
 
         LiteralFactory lf = LiteralFactory.getInstance();
         ci.getLock().writeLock().lock();
         try {
             // Global text annotation to attach all the topic annotation to it.
-            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+            IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
             metadata.add(new TripleImpl(textAnnotation,
                     org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE,
                     OntologicalClasses.SKOS_CONCEPT));
             for (TopicSuggestion topic : topics) {
-                UriRef enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+                IRI enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE,
                         TechnicalClasses.ENHANCER_TOPICANNOTATION));
@@ -540,7 +540,7 @@ public class TopicClassificationEngine e
                 // add link to entity
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE,
-                        new UriRef(topic.conceptUri)));
+                        new IRI(topic.conceptUri)));
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_TYPE,
                         OntologicalClasses.SKOS_CONCEPT));
@@ -1509,25 +1509,25 @@ public class TopicClassificationEngine e
     }
 
     @Override
-    public int importConceptsFromGraph(Graph graph, UriRef conceptClass, UriRef broaderProperty) throws ClassifierException {
+    public int importConceptsFromGraph(ImmutableGraph graph, IRI conceptClass, IRI broaderProperty) throws ClassifierException {
         int importedCount = 0;
         Iterator<Triple> conceptIterator = graph.filter(null,
             org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE, conceptClass);
         while (conceptIterator.hasNext()) {
             Triple conceptTriple = conceptIterator.next();
-            if (!(conceptTriple.getSubject() instanceof UriRef)) {
+            if (!(conceptTriple.getSubject() instanceof IRI)) {
                 continue;
             }
-            UriRef conceptUri = (UriRef) conceptTriple.getSubject();
+            IRI conceptUri = (IRI) conceptTriple.getSubject();
             GraphNode node = new GraphNode(conceptUri, graph);
             List<String> broaderConcepts = new ArrayList<String>();
             // TODO: use OWL property inference on sub-properties here instead of explicit
             // property filter
             Iterator<GraphNode> broaderIterator = node.getObjectNodes(broaderProperty);
             while (broaderIterator.hasNext()) {
-                Resource node2 = broaderIterator.next().getNode();
-                if (node2 instanceof UriRef) {
-                    broaderConcepts.add(((UriRef) node2).getUnicodeString());
+                RDFTerm node2 = broaderIterator.next().getNode();
+                if (node2 instanceof IRI) {
+                    broaderConcepts.add(((IRI) node2).getUnicodeString());
                 }
             }
             addConcept(conceptUri.getUnicodeString(), broaderConcepts);

Modified: stanbol/trunk/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java (original)
+++ stanbol/trunk/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java Tue May 17 22:20:49 2016
@@ -34,7 +34,7 @@ import java.util.Map;
 import java.util.Random;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
@@ -206,7 +206,7 @@ public class TopicEngineTest extends Emb
         log.info(" --- testImportModelFromSKOS --- ");
         Parser parser = Parser.getInstance();
         parser.bindParsingProvider(new JenaParserProvider());
-        Graph graph = parser.parse(getClass().getResourceAsStream("/sample-scheme.skos.rdf.xml"),
+        ImmutableGraph graph = parser.parse(getClass().getResourceAsStream("/sample-scheme.skos.rdf.xml"),
             SupportedFormat.RDF_XML);
         int imported = classifier.importConceptsFromGraph(graph, OntologicalClasses.SKOS_CONCEPT,
             Properties.SKOS_BROADER);

Modified: stanbol/trunk/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java (original)
+++ stanbol/trunk/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java Tue May 17 22:20:49 2016
@@ -38,8 +38,8 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
@@ -241,15 +241,15 @@ public final class TopicModelResource ex
         @Consumes(MediaType.WILDCARD)
         public Response importConceptsFromRDF(@QueryParam(value = "concept_class") String conceptClassUri,
                 @QueryParam(value = "broader_property") String broaderPropertyUri,
-                Graph graph,
+                ImmutableGraph graph,
                 @Context HttpHeaders headers) throws ClassifierException {
-            UriRef conceptClass = OntologicalClasses.SKOS_CONCEPT;
-            UriRef broaderProperty = Properties.SKOS_BROADER;
+            IRI conceptClass = OntologicalClasses.SKOS_CONCEPT;
+            IRI broaderProperty = Properties.SKOS_BROADER;
             if (conceptClassUri != null && !conceptClassUri.isEmpty()) {
-                conceptClass = new UriRef(conceptClassUri);
+                conceptClass = new IRI(conceptClassUri);
             }
             if (broaderPropertyUri != null && !broaderPropertyUri.isEmpty()) {
-                broaderProperty = new UriRef(broaderPropertyUri);
+                broaderProperty = new IRI(broaderPropertyUri);
             }
             int imported = classifier.importConceptsFromGraph(graph, conceptClass, broaderProperty);
             ResponseBuilder rb;

Modified: stanbol/trunk/enhancement-engines/uima/uimalocal-template/pom.xml
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimalocal-template/pom.xml?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimalocal-template/pom.xml (original)
+++ stanbol/trunk/enhancement-engines/uima/uimalocal-template/pom.xml Tue May 17 22:20:49 2016
@@ -84,8 +84,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>jar</type>
     </dependency>
     <dependency>
       <groupId>org.apache.felix</groupId>

Modified: stanbol/trunk/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java (original)
+++ stanbol/trunk/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java Tue May 17 22:20:49 2016
@@ -29,7 +29,7 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -149,7 +149,7 @@ public class UIMALocal extends AbstractE
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException("No ContentPart with an supported Mimetype '"
                     + SUPPORTED_MIMETYPES + "' found for ContentItem " + ci.getUri()
@@ -182,16 +182,16 @@ public class UIMALocal extends AbstractE
 
         for (String typeName : uimaTypeNames) {
             List<FeatureStructure> featureSetList = concertToCasLight(jcas, typeName);
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
 
             FeatureStructureListHolder holder;
             ci.getLock().writeLock().lock();
             try {
-                holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+                holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             } catch (NoSuchPartException e) {
                 holder = new FeatureStructureListHolder();
                 logger.info("Adding FeatureSet List Holder content part with uri:" + uimaUri);
-                ci.addPart(uimaUriRef, holder);
+                ci.addPart(uimaIRI, holder);
                 logger.info(uimaUri + " content part added.");
             } finally {
                 ci.getLock().writeLock().unlock();

Modified: stanbol/trunk/enhancement-engines/uima/uimaremote/pom.xml
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimaremote/pom.xml?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimaremote/pom.xml (original)
+++ stanbol/trunk/enhancement-engines/uima/uimaremote/pom.xml Tue May 17 22:20:49 2016
@@ -106,8 +106,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>bundle</type>
     </dependency>
     <dependency>
       <groupId>org.apache.felix</groupId>

Modified: stanbol/trunk/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java (original)
+++ stanbol/trunk/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java Tue May 17 22:20:49 2016
@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -138,7 +138,7 @@ public class UIMARemoteClient extends Ab
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException("No ContentPart with an supported Mimetype '"
                     + SUPPORTED_MIMETYPES + "' found for ContentItem " + ci.getUri()
@@ -156,16 +156,16 @@ public class UIMARemoteClient extends Ab
         for (UIMASimpleServletClient ussc : usscList) {
             logger.info("Accessing uima source:" + ussc.getSourceName() + " endpoint:" + ussc.getUri());
             List<FeatureStructure> featureSetList = ussc.process(text);
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
 
             FeatureStructureListHolder holder;
             ci.getLock().writeLock().lock();
             try {
-                holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+                holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             } catch (NoSuchPartException e) {
                 holder = new FeatureStructureListHolder();
                 logger.info("Adding FeatureSet List Holder content part with uri:" + uimaUri);
-                ci.addPart(uimaUriRef, holder);
+                ci.addPart(uimaIRI, holder);
                 logger.info(uimaUri + " content part added.");
             } finally {
                 ci.getLock().writeLock().unlock();

Modified: stanbol/trunk/enhancement-engines/uima/uimatotriples/pom.xml
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimatotriples/pom.xml?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimatotriples/pom.xml (original)
+++ stanbol/trunk/enhancement-engines/uima/uimatotriples/pom.xml Tue May 17 22:20:49 2016
@@ -112,8 +112,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>jar</type>
     </dependency>
     <dependency>
       <groupId>org.apache.stanbol</groupId>

Modified: stanbol/trunk/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java (original)
+++ stanbol/trunk/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java Tue May 17 22:20:49 2016
@@ -26,10 +26,10 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -158,9 +158,9 @@ public class UIMAToTriples extends Abstr
 
 
         try {
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
             logger.info(new StringBuilder("Trying to load holder for ref:").append(uimaUri).toString());
-            holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+            holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             for (String source : sourceNames) {
                 logger.info(new StringBuilder("Processing UIMA source:").append(source).toString());
                 List<FeatureStructure> sourceList = holder.getFeatureStructureList(source);
@@ -176,14 +176,14 @@ public class UIMAToTriples extends Abstr
                     logger.debug(new StringBuilder("Checking ").append(typeName).toString());
                     if (tnfs.checkFeatureStructureAllowed(typeName, fs.getFeatures())) {
                         logger.debug(new StringBuilder("Adding ").append(typeName).toString());
-                        UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(
+                        IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
                                 ci, this);
-                        MGraph metadata = ci.getMetadata();
+                        Graph metadata = ci.getMetadata();
                         String uriRefStr = uimaUri + ":" + typeName;
                         if (mappings.containsKey(typeName)) {
                             uriRefStr = mappings.get(typeName);
                         }
-                        metadata.add(new TripleImpl(textAnnotation, DC_TYPE, new UriRef(uriRefStr)));
+                        metadata.add(new TripleImpl(textAnnotation, DC_TYPE, new IRI(uriRefStr)));
 
                         if (fs.getFeature("begin") != null) {
                             metadata.add(new TripleImpl(textAnnotation, ENHANCER_START,
@@ -205,7 +205,7 @@ public class UIMAToTriples extends Abstr
                                     predRefStr = mappings.get(f.getName());
                                 }
 
-                                UriRef predicate = new UriRef(predRefStr);
+                                IRI predicate = new IRI(predRefStr);
 
                                 metadata.add(new TripleImpl(textAnnotation, predicate, new PlainLiteralImpl(f.getValueAsString())));
                             }

Modified: stanbol/trunk/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java (original)
+++ stanbol/trunk/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java Tue May 17 22:20:49 2016
@@ -23,17 +23,17 @@ import java.io.InputStream;
 import java.util.Collections;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EngineException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -90,10 +90,10 @@ public class XmpExtractorEngine extends
 		}
     	byte[] bytes = baos.toByteArray();
     	if (bytes.length > 0) {
-	        MGraph model = new IndexedMGraph();
+	        Graph model = new IndexedGraph();
 			parser.parse(model, new ByteArrayInputStream(bytes), "application/rdf+xml");
 	        GraphNode gn = new GraphNode(
-					new UriRef("http://relative-uri.fake/"), model);
+					new IRI("http://relative-uri.fake/"), model);
 			gn.replaceWith(ci.getUri());
 	        ci.getLock().writeLock().lock();
 	        try {