You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@stanbol.apache.org by re...@apache.org on 2016/05/17 22:20:55 UTC
svn commit: r1744328 [16/24] - in /stanbol/trunk: ./
commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/
commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/
commons/installer/bundleprovider/src/main/java/...
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java Tue May 17 22:20:49 2016
@@ -67,22 +67,21 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.apache.clerezza.rdf.core.sparql.ParseException;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.commons.lang.StringUtils;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
import org.apache.stanbol.commons.viewable.Viewable;
import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
import org.apache.stanbol.commons.web.base.resource.LayoutConfiguration;
@@ -107,13 +106,13 @@ public class ContentItemResource extends
private final Logger log = LoggerFactory.getLogger(getClass());
// TODO make this configurable through a property
- public static final UriRef SUMMARY = new UriRef("http://www.w3.org/2000/01/rdf-schema#comment");
+ public static final IRI SUMMARY = new IRI("http://www.w3.org/2000/01/rdf-schema#comment");
// TODO make this configurable through a property
- public static final UriRef THUMBNAIL = new UriRef("http://dbpedia.org/ontology/thumbnail");
- public static final UriRef DEPICTION = new UriRef("http://xmlns.com/foaf/0.1/depiction");
+ public static final IRI THUMBNAIL = new IRI("http://dbpedia.org/ontology/thumbnail");
+ public static final IRI DEPICTION = new IRI("http://xmlns.com/foaf/0.1/depiction");
- public final Map<UriRef,String> defaultThumbnails = new HashMap<UriRef,String>();
+ public final Map<IRI,String> defaultThumbnails = new HashMap<IRI,String>();
protected ContentItem contentItem;
@@ -140,10 +139,10 @@ public class ContentItemResource extends
* {@link Properties#ENHANCER_SELECTED_TEXT}.
* This map is initialised by {@link #initOccurrences()}.
*/
- protected Map<UriRef,Map<EntityExtractionSummary,EntityExtractionSummary>> extractionsByTypeMap =
- new HashMap<UriRef,Map<EntityExtractionSummary,EntityExtractionSummary>>();
+ protected Map<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>> extractionsByTypeMap =
+ new HashMap<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>>();
- private MGraph executionMetadata;
+ private Graph executionMetadata;
private ChainExecution chainExecution;
@@ -169,7 +168,7 @@ public class ContentItemResource extends
this.enhancementException = enhancementException;
if (localId != null) {
URI rawURI = uriInfo.getBaseUriBuilder().path(storePath).path("raw").path(localId).build();
- Entry<UriRef,Blob> plainTextContentPart = ContentItemHelper.getBlob(contentItem, Collections.singleton("text/plain"));
+ Entry<IRI,Blob> plainTextContentPart = ContentItemHelper.getBlob(contentItem, Collections.singleton("text/plain"));
if (plainTextContentPart != null) {
this.textContent = ContentItemHelper.getText(plainTextContentPart.getValue());
}
@@ -191,16 +190,16 @@ public class ContentItemResource extends
}
//init ExecutionMetadata
try {
- executionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+ executionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
} catch(NoSuchPartException e){
executionMetadata = null;
}
if(executionMetadata != null){
- NonLiteral ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, ci.getUri());
+ BlankNodeOrIRI ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, ci.getUri());
if(ce != null){
chainExecution = new ChainExecution(executionMetadata, ce);
engineExecutions = new ArrayList<Execution>();
- for(NonLiteral ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
+ for(BlankNodeOrIRI ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
engineExecutions.add(new Execution(chainExecution,executionMetadata, ex));
}
Collections.sort(engineExecutions);
@@ -275,8 +274,8 @@ public class ContentItemResource extends
/**
* Used to print occurrences with other types than the natively supported
*/
- public Collection<UriRef> getOtherOccurrencyTypes(){
- Set<UriRef> types = new HashSet<UriRef>(extractionsByTypeMap.keySet());
+ public Collection<IRI> getOtherOccurrencyTypes(){
+ Set<IRI> types = new HashSet<IRI>(extractionsByTypeMap.keySet());
types.remove(DBPEDIA_PERSON);
types.remove(DBPEDIA_ORGANISATION);
types.remove(DBPEDIA_PLACE);
@@ -285,7 +284,7 @@ public class ContentItemResource extends
types.remove(null); //other
return types;
}
- public static String extractLabel(UriRef uri){
+ public static String extractLabel(IRI uri){
String fullUri = uri.getUnicodeString();
int index = Math.max(fullUri.lastIndexOf('#'),fullUri.lastIndexOf('/'));
index = Math.max(index, fullUri.lastIndexOf(':'));
@@ -296,7 +295,7 @@ public class ContentItemResource extends
return uri.getUnicodeString();
}
}
- public Collection<EntityExtractionSummary> getOccurrences(UriRef type){
+ public Collection<EntityExtractionSummary> getOccurrences(IRI type){
Map<EntityExtractionSummary,EntityExtractionSummary> typeMap = extractionsByTypeMap.get(type);
Collection<EntityExtractionSummary> typeOccurrences;
if(typeMap != null){
@@ -340,14 +339,14 @@ public class ContentItemResource extends
}
private void initOccurrences() {
- MGraph graph = contentItem.getMetadata();
+ Graph graph = contentItem.getMetadata();
LiteralFactory lf = LiteralFactory.getInstance();
- Map<UriRef,Collection<NonLiteral>> suggestionMap = new HashMap<UriRef,Collection<NonLiteral>>();
+ Map<IRI,Collection<BlankNodeOrIRI>> suggestionMap = new HashMap<IRI,Collection<BlankNodeOrIRI>>();
// 1) get Entity Annotations
- Map<NonLiteral,Map<EAProps,Object>> entitySuggestionMap = new HashMap<NonLiteral,Map<EAProps,Object>>();
+ Map<BlankNodeOrIRI,Map<EAProps,Object>> entitySuggestionMap = new HashMap<BlankNodeOrIRI,Map<EAProps,Object>>();
Iterator<Triple> entityAnnotations = graph.filter(null, RDF.type, ENHANCER_ENTITYANNOTATION);
while(entityAnnotations.hasNext()){
- NonLiteral entityAnnotation = entityAnnotations.next().getSubject();
+ BlankNodeOrIRI entityAnnotation = entityAnnotations.next().getSubject();
//to avoid multiple lookups (e.g. if one entityAnnotation links to+
//several TextAnnotations) we cache the data in an intermediate Map
Map<EAProps,Object> eaData = new EnumMap<EAProps,Object>(EAProps.class);
@@ -356,12 +355,12 @@ public class ContentItemResource extends
eaData.put(EAProps.confidence, EnhancementEngineHelper.get(
graph, entityAnnotation, ENHANCER_CONFIDENCE, Double.class, lf));
entitySuggestionMap.put(entityAnnotation, eaData);
- Iterator<UriRef> textAnnotations = getReferences(graph, entityAnnotation, DC_RELATION);
+ Iterator<IRI> textAnnotations = getReferences(graph, entityAnnotation, DC_RELATION);
while(textAnnotations.hasNext()){
- UriRef textAnnotation = textAnnotations.next();
- Collection<NonLiteral> suggestions = suggestionMap.get(textAnnotation);
+ IRI textAnnotation = textAnnotations.next();
+ Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
if(suggestions == null){
- suggestions = new ArrayList<NonLiteral>();
+ suggestions = new ArrayList<BlankNodeOrIRI>();
suggestionMap.put(textAnnotation, suggestions);
}
suggestions.add(entityAnnotation);
@@ -370,7 +369,7 @@ public class ContentItemResource extends
// 2) get the TextAnnotations
Iterator<Triple> textAnnotations = graph.filter(null, RDF.type, ENHANCER_TEXTANNOTATION);
while(textAnnotations.hasNext()){
- NonLiteral textAnnotation = textAnnotations.next().getSubject();
+ BlankNodeOrIRI textAnnotation = textAnnotations.next().getSubject();
//we need to process those to show multiple mentions
// if (graph.filter(textAnnotation, DC_RELATION, null).hasNext()) {
// // this is not the most specific occurrence of this name: skip
@@ -388,12 +387,12 @@ public class ContentItemResource extends
ENHANCER_END,Integer.class,lf);
Double confidence = EnhancementEngineHelper.get(graph, textAnnotation,
ENHANCER_CONFIDENCE, Double.class, lf);
- Iterator<UriRef> types = getReferences(graph, textAnnotation, DC_TYPE);
+ Iterator<IRI> types = getReferences(graph, textAnnotation, DC_TYPE);
if(!types.hasNext()){ //create an iterator over null in case no types are present
- types = Collections.singleton((UriRef)null).iterator();
+ types = Collections.singleton((IRI)null).iterator();
}
while(types.hasNext()){
- UriRef type = types.next();
+ IRI type = types.next();
Map<EntityExtractionSummary,EntityExtractionSummary> occurrenceMap = extractionsByTypeMap.get(type);
if(occurrenceMap == null){
occurrenceMap = new TreeMap<EntityExtractionSummary,EntityExtractionSummary>();
@@ -405,12 +404,12 @@ public class ContentItemResource extends
DC_LANGUAGE);
}
EntityExtractionSummary entity = new EntityExtractionSummary(text, type, start,end,confidence,defaultThumbnails);
- Collection<NonLiteral> suggestions = suggestionMap.get(textAnnotation);
+ Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
if(suggestions != null){
- for(NonLiteral entityAnnotation : suggestions){
+ for(BlankNodeOrIRI entityAnnotation : suggestions){
Map<EAProps,Object> eaData = entitySuggestionMap.get(entityAnnotation);
entity.addSuggestion(
- (UriRef)eaData.get(EAProps.entity),
+ (IRI)eaData.get(EAProps.entity),
(String)eaData.get(EAProps.label),
(Double)eaData.get(EAProps.confidence),
graph);
@@ -577,14 +576,14 @@ public class ContentItemResource extends
protected final String name;
- protected final UriRef type;
+ protected final IRI type;
protected List<EntitySuggestion> suggestions = new ArrayList<EntitySuggestion>();
- protected Set<UriRef> suggestionSet = new HashSet<UriRef>();
+ protected Set<IRI> suggestionSet = new HashSet<IRI>();
protected List<Mention> mentions = new ArrayList<Mention>();
- public final Map<UriRef,String> defaultThumbnails;
+ public final Map<IRI,String> defaultThumbnails;
private Integer start;
@@ -594,7 +593,7 @@ public class ContentItemResource extends
private Double confidence;
- public EntityExtractionSummary(String name, UriRef type, Integer start, Integer end, Double confidence, Map<UriRef,String> defaultThumbnails) {
+ public EntityExtractionSummary(String name, IRI type, Integer start, Integer end, Double confidence, Map<IRI,String> defaultThumbnails) {
if(name == null){
this.name = extractLabel(type);
} else {
@@ -608,7 +607,7 @@ public class ContentItemResource extends
this.confidence = confidence;
}
- public void addSuggestion(UriRef uri, String label, Double confidence, TripleCollection properties) {
+ public void addSuggestion(IRI uri, String label, Double confidence, Graph properties) {
EntitySuggestion suggestion = new EntitySuggestion(uri, type, label, confidence, properties,
defaultThumbnails);
suggestionSet.add(uri);
@@ -748,24 +747,24 @@ public class ContentItemResource extends
public static class EntitySuggestion implements Comparable<EntitySuggestion> {
- protected final UriRef uri;
+ protected final IRI uri;
- protected final UriRef type;
+ protected final IRI type;
protected final String label;
protected final Double confidence;
- protected TripleCollection entityProperties;
+ protected Graph entityProperties;
- protected final Map<UriRef,String> defaultThumbnails;
+ protected final Map<IRI,String> defaultThumbnails;
- public EntitySuggestion(UriRef uri,
- UriRef type,
+ public EntitySuggestion(IRI uri,
+ IRI type,
String label,
Double confidence,
- TripleCollection entityProperties,
- Map<UriRef,String> defaultThumbnails) {
+ Graph entityProperties,
+ Map<IRI,String> defaultThumbnails) {
this.uri = uri;
if(label == null){
this.label = extractLabel(uri);
@@ -799,17 +798,17 @@ public class ContentItemResource extends
public String getThumbnailSrc() {
Iterator<Triple> thumbnails = entityProperties.filter(uri, THUMBNAIL, null);
while (thumbnails.hasNext()) {
- Resource object = thumbnails.next().getObject();
- if (object instanceof UriRef) {
- return ((UriRef) object).getUnicodeString();
+ RDFTerm object = thumbnails.next().getObject();
+ if (object instanceof IRI) {
+ return ((IRI) object).getUnicodeString();
}
}
//if no dbpedia ontology thumbnail was found. try the same with foaf:depiction
thumbnails = entityProperties.filter(uri, DEPICTION, null);
while (thumbnails.hasNext()) {
- Resource object = thumbnails.next().getObject();
- if (object instanceof UriRef) {
- return ((UriRef) object).getUnicodeString();
+ RDFTerm object = thumbnails.next().getObject();
+ if (object instanceof IRI) {
+ return ((IRI) object).getUnicodeString();
}
}
return getMissingThumbnailSrc();
@@ -826,9 +825,9 @@ public class ContentItemResource extends
public String getSummary() {
Iterator<Triple> abstracts = entityProperties.filter(uri, SUMMARY, null);
while (abstracts.hasNext()) {
- Resource object = abstracts.next().getObject();
- if (object instanceof PlainLiteral) {
- PlainLiteral abstract_ = (PlainLiteral) object;
+ RDFTerm object = abstracts.next().getObject();
+ if (object instanceof Literal) {
+ Literal abstract_ = (Literal) object;
if (new Language("en").equals(abstract_.getLanguage())) {
return abstract_.getLexicalForm();
}
@@ -869,15 +868,15 @@ public class ContentItemResource extends
* @return an RDF/JSON descriptions of places for the word map widget
*/
public String getPlacesAsJSON() throws ParseException, UnsupportedEncodingException {
- MGraph g = new IndexedMGraph();
+ Graph g = new IndexedGraph();
LiteralFactory lf = LiteralFactory.getInstance();
- MGraph metadata = contentItem.getMetadata();
+ Graph metadata = contentItem.getMetadata();
for (EntityExtractionSummary p : getPlaceOccurrences()) {
EntitySuggestion bestGuess = p.getBestGuess();
if (bestGuess == null) {
continue;
}
- UriRef uri = new UriRef(bestGuess.getUri());
+ IRI uri = new IRI(bestGuess.getUri());
Iterator<Triple> latitudes = metadata.filter(uri, GEO_LAT, null);
if (latitudes.hasNext()) {
g.add(latitudes.next());
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java Tue May 17 22:20:49 2016
@@ -50,8 +50,8 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
@@ -191,7 +191,7 @@ public class EnhancementEnginesRootResou
@Produces(value={JSON_LD, APPLICATION_JSON,N3,N_TRIPLE,RDF_JSON,RDF_XML,TURTLE,X_TURTLE})
public Response getEngines(@Context HttpHeaders headers){
String rootUrl = uriInfo.getBaseUriBuilder().path(getRootUrl()).build().toString();
- MGraph graph = new SimpleMGraph();
+ Graph graph = new SimpleGraph();
addActiveEngines(getActiveEngines(), graph, rootUrl);
ResponseBuilder res = Response.ok(graph);
// addCORSOrigin(servletContext,res, headers);
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java Tue May 17 22:20:49 2016
@@ -47,10 +47,10 @@ import javax.ws.rs.core.Response.Respons
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.sparql.ParseException;
import org.apache.clerezza.rdf.core.sparql.QueryEngine;
@@ -62,6 +62,7 @@ import org.apache.clerezza.rdf.ontologie
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.apache.stanbol.enhancer.servicesapi.rdf.Enhancer;
import org.apache.stanbol.commons.viewable.Viewable;
@@ -95,7 +96,7 @@ public final class EnhancerRootResource
private ContentItemFactory ciFactory;
@Reference
private Serializer serializer;
- @Reference
+ @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY)
private QueryEngine queryEngine;
@Path("")
@@ -123,7 +124,7 @@ public final class EnhancerRootResource
@GET
@Produces(value = {JSON_LD, APPLICATION_JSON, N3, N_TRIPLE, RDF_JSON, RDF_XML, TURTLE, X_TURTLE})
public Response getEngines(@Context HttpHeaders headers) {
- MGraph graph = getEnhancerConfigGraph();
+ Graph graph = getEnhancerConfigGraph();
ResponseBuilder res = Response.ok(graph);
//addCORSOrigin(servletContext,res, headers);
return res.build();
@@ -134,10 +135,10 @@ public final class EnhancerRootResource
*
* @return the graph with the configuration
*/
- private MGraph getEnhancerConfigGraph() {
+ private Graph getEnhancerConfigGraph() {
String rootUrl = getUriInfo().getBaseUriBuilder().path(getRootUrl()).build().toString();
- UriRef enhancerResource = new UriRef(rootUrl + "enhancer");
- MGraph graph = new SimpleMGraph();
+ IRI enhancerResource = new IRI(rootUrl + "enhancer");
+ Graph graph = new SimpleGraph();
graph.add(new TripleImpl(enhancerResource, RDF.type, Enhancer.ENHANCER));
addActiveEngines(engineManager, graph, rootUrl);
addActiveChains(chainManager, graph, rootUrl);
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java Tue May 17 22:20:49 2016
@@ -36,9 +36,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.sparql.QueryEngine;
import org.apache.felix.scr.annotations.Component;
@@ -181,7 +181,7 @@ public class GenericEnhancerUiResource e
*/
public Set<ExecutionNode> getExecutionNodes() {
if (_executionNodes == null) {
- Graph ep;
+ ImmutableGraph ep;
try {
ep = chain.getExecutionPlan();
} catch (ChainException e) {
@@ -189,11 +189,11 @@ public class GenericEnhancerUiResource e
}
if (ep != null) {
_executionNodes = new LinkedHashSet<ExecutionNode>();
- Set<NonLiteral> processed = new HashSet<NonLiteral>();
- Set<NonLiteral> next;
+ Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
+ Set<BlankNodeOrIRI> next;
do {
next = ExecutionPlanHelper.getExecutable(ep, processed);
- for (NonLiteral node : next) {
+ for (BlankNodeOrIRI node : next) {
_executionNodes.add(new ExecutionNode(ep, node));
}
processed.addAll(next);
@@ -236,12 +236,12 @@ public class GenericEnhancerUiResource e
}
public class ExecutionNode {
- private final NonLiteral node;
- private final TripleCollection ep;
+ private final BlankNodeOrIRI node;
+ private final Graph ep;
private final boolean optional;
private final String engineName;
- public ExecutionNode(TripleCollection executionPlan, NonLiteral node) {
+ public ExecutionNode(Graph executionPlan, BlankNodeOrIRI node) {
this.node = node;
this.ep = executionPlan;
this.optional = ExecutionPlanHelper.isOptional(ep, node);
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java Tue May 17 22:20:49 2016
@@ -23,10 +23,10 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.clerezza.rdf.ontologies.RDFS;
import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
@@ -98,7 +98,7 @@ public final class EnhancerUtils {
* @param graph the RDF graph to add the triples
* @param rootUrl the root URL used by the current request
*/
- public static void addActiveEngines(EnhancementEngineManager engineManager,MGraph graph, String rootUrl) {
+ public static void addActiveEngines(EnhancementEngineManager engineManager,Graph graph, String rootUrl) {
addActiveEngines(buildEnginesMap(engineManager).values(), graph, rootUrl);
}
/**
@@ -114,11 +114,11 @@ public final class EnhancerUtils {
* @param rootUrl the root URL used by the current request
* @see EnhancerUtils#buildEnginesMap(EnhancementEngineManager)
*/
- public static void addActiveEngines(Iterable<Entry<ServiceReference,EnhancementEngine>> activeEngines,MGraph graph, String rootUrl) {
- UriRef enhancerResource = new UriRef(rootUrl+"enhancer");
+ public static void addActiveEngines(Iterable<Entry<ServiceReference,EnhancementEngine>> activeEngines,Graph graph, String rootUrl) {
+ IRI enhancerResource = new IRI(rootUrl+"enhancer");
graph.add(new TripleImpl(enhancerResource, RDF.type, Enhancer.ENHANCER));
for(Entry<ServiceReference,EnhancementEngine> entry : activeEngines){
- UriRef engineResource = new UriRef(rootUrl+"enhancer/engine/"+entry.getValue().getName());
+ IRI engineResource = new IRI(rootUrl+"enhancer/engine/"+entry.getValue().getName());
graph.add(new TripleImpl(enhancerResource, Enhancer.HAS_ENGINE, engineResource));
graph.add(new TripleImpl(engineResource, RDF.type, ENHANCEMENT_ENGINE));
graph.add(new TripleImpl(engineResource, RDFS.label, new PlainLiteralImpl(entry.getValue().getName())));
@@ -137,7 +137,7 @@ public final class EnhancerUtils {
* @param graph the RDF graph to add the triples
* @param rootUrl the root URL used by the current request
*/
- public static void addActiveChains(ChainManager chainManager, MGraph graph, String rootUrl) {
+ public static void addActiveChains(ChainManager chainManager, Graph graph, String rootUrl) {
addActiveChains(buildChainsMap(chainManager).values(), chainManager.getDefault(), graph, rootUrl);
}
/**
@@ -153,11 +153,11 @@ public final class EnhancerUtils {
* @param graph the RDF graph to add the triples
* @param rootUrl the root URL used by the current request
*/
- public static void addActiveChains(Iterable<Entry<ServiceReference,Chain>> activeChains, Chain defaultChain, MGraph graph, String rootUrl) {
- UriRef enhancer = new UriRef(rootUrl+"enhancer");
+ public static void addActiveChains(Iterable<Entry<ServiceReference,Chain>> activeChains, Chain defaultChain, Graph graph, String rootUrl) {
+ IRI enhancer = new IRI(rootUrl+"enhancer");
graph.add(new TripleImpl(enhancer, RDF.type, Enhancer.ENHANCER));
for(Entry<ServiceReference,Chain> entry : activeChains){
- UriRef chainResource = new UriRef(rootUrl+"enhancer/chain/"+entry.getValue().getName());
+ IRI chainResource = new IRI(rootUrl+"enhancer/chain/"+entry.getValue().getName());
graph.add(new TripleImpl(enhancer, Enhancer.HAS_CHAIN, chainResource));
if(entry.getValue().equals(defaultChain)){
graph.add(new TripleImpl(enhancer, Enhancer.HAS_DEFAULT_CHAIN, chainResource));
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java Tue May 17 22:20:49 2016
@@ -23,8 +23,8 @@ import java.util.Set;
import javax.ws.rs.QueryParam;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.servicesapi.Blob;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.helper.ContentItemHelper;
@@ -43,7 +43,7 @@ public final class RequestPropertiesHelp
/**
* @see ContentItemHelper#REQUEST_PROPERTIES_URI
*/
- public static final UriRef REQUEST_PROPERTIES_URI =
+ public static final IRI REQUEST_PROPERTIES_URI =
ContentItemHelper.REQUEST_PROPERTIES_URI;
/**
* Boolean switch parsed as {@link QueryParam} that allows to deactivate the
@@ -52,8 +52,8 @@ public final class RequestPropertiesHelp
public static final String OMIT_METADATA = "stanbol.enhancer.web.omitMetadata";
/**
* {@link Set Set<String>} containing all the URIs of the
- * {@link ContentItem#getPart(UriRef, Class) ContentParts} representing
- * RDF data (compatible to Clerezza {@link TripleCollection}). If the
+ * {@link ContentItem#getPart(IRI, Class) ContentParts} representing
+ * RDF data (compatible to Clerezza {@link Graph}). If the
* returned set contains '*' than all such content parts need to be returned.<p>
* NOTE: This can also be used to include the Request Properties
* as "application/json" in the Response by adding this
@@ -74,7 +74,7 @@ public final class RequestPropertiesHelp
public static final String OUTPUT_CONTENT = "stanbol.enhancer.web.outputContent";
/**
* This allows to copy the {@link ExecutionMetadata} and {@link ExecutionPlan}
- * data stored in a {@link ContentItem#getPart(UriRef, Class) contentPart} with
+ * data stored in a {@link ContentItem#getPart(IRI, Class) contentPart} with
* the URI {@link ExecutionMetadata#CHAIN_EXECUTION} over to the
* {@link ContentItem#getMetadata() metadata} of the content item.<p>
* This feature is intended to allow users to retrieve such meta information
@@ -88,7 +88,7 @@ public final class RequestPropertiesHelp
*/
public static final String RDF_FORMAT = "stanbol.enhancer.web.rdfFormat";
/**
- * {@link Set Set<String>} containing all the {@link UriRef}s of
+ * {@link Set Set<String>} containing all the {@link IRI}s of
* {@link ContentItem#getPart(int, Class) ContentItem.getPart}(uri,{@link Blob})
* that where parsed with the request.
*/
Modified: stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java (original)
+++ stanbol/trunk/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java Tue May 17 22:20:49 2016
@@ -59,8 +59,8 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.UnsupportedSerializationFormatException;
import org.apache.commons.io.IOUtils;
@@ -197,7 +197,7 @@ public class ContentItemWriter implement
+ mediaType.toString(),Response.Status.NOT_ACCEPTABLE);
}
} else { // (2) return a single content part
- Entry<UriRef,Blob> contentPart = getBlob(ci, Collections.singleton(mediaType.toString()));
+ Entry<IRI,Blob> contentPart = getBlob(ci, Collections.singleton(mediaType.toString()));
if(contentPart == null){ //no alternate content with the requeste media type
throw new WebApplicationException("The requested enhancement chain has not created an "
+ "version of the parsed content in the reuqest media type "
@@ -267,11 +267,11 @@ public class ContentItemWriter implement
}
//(3) serialising the Content (Bloby)
//(3.a) Filter based on parameter
- List<Entry<UriRef,Blob>> includedBlobs = filterBlobs(ci, reqProp);
+ List<Entry<IRI,Blob>> includedBlobs = filterBlobs(ci, reqProp);
//(3.b) Serialise the filtered
if(!includedBlobs.isEmpty()) {
Map<String,ContentBody> contentParts = new LinkedHashMap<String,ContentBody>();
- for(Entry<UriRef,Blob> entry : includedBlobs){
+ for(Entry<IRI,Blob> entry : includedBlobs){
Blob blob = entry.getValue();
ContentType ct = ContentType.create(blob.getMimeType());
String cs = blob.getParameter().get("charset");
@@ -304,7 +304,7 @@ public class ContentItemWriter implement
ContentType.APPLICATION_JSON.withCharset(UTF8));
}
//(5) additional RDF metadata stored in contentParts
- for(Entry<UriRef,TripleCollection> entry : getContentParts(ci, TripleCollection.class).entrySet()){
+ for(Entry<IRI,Graph> entry : getContentParts(ci, Graph.class).entrySet()){
if(includeContentParts.isEmpty() || includeContentParts.contains(
entry.getKey())){
entityBuilder.addPart(entry.getKey().getUnicodeString(),
@@ -372,16 +372,16 @@ public class ContentItemWriter implement
* @param properties
* @return
*/
- private List<Entry<UriRef,Blob>> filterBlobs(ContentItem ci, Map<String,Object> properties) {
- final List<Entry<UriRef,Blob>> includedContentPartList;
+ private List<Entry<IRI,Blob>> filterBlobs(ContentItem ci, Map<String,Object> properties) {
+ final List<Entry<IRI,Blob>> includedContentPartList;
Set<MediaType> includeMediaTypes = getIncludedMediaTypes(properties);
if(includeMediaTypes == null){
includedContentPartList = Collections.emptyList();
} else {
- includedContentPartList = new ArrayList<Map.Entry<UriRef,Blob>>();
+ includedContentPartList = new ArrayList<Map.Entry<IRI,Blob>>();
Set<String> ignoreContentPartUris = getIgnoredContentURIs(properties);
nextContentPartEntry:
- for(Entry<UriRef,Blob> entry : getContentParts(ci,Blob.class).entrySet()){
+ for(Entry<IRI,Blob> entry : getContentParts(ci,Blob.class).entrySet()){
if(!ignoreContentPartUris.contains(entry.getKey().getUnicodeString())){
Blob blob = entry.getValue();
MediaType blobMediaType = MediaType.valueOf(blob.getMimeType());
@@ -505,11 +505,11 @@ public class ContentItemWriter implement
*/
private class ClerezzaContentBody extends AbstractContentBody implements ContentBody,ContentDescriptor {
- private TripleCollection graph;
+ private Graph graph;
private String charset;
private String name;
- protected ClerezzaContentBody(String name, TripleCollection graph, MediaType mimeType){
+ protected ClerezzaContentBody(String name, Graph graph, MediaType mimeType){
super(ContentType.create(new StringBuilder(mimeType.getType())
.append('/').append(mimeType.getSubtype()).toString(), UTF8));
charset = mimeType.getParameters().get("charset");
Modified: stanbol/trunk/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java (original)
+++ stanbol/trunk/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java Tue May 17 22:20:49 2016
@@ -51,11 +51,11 @@ import javax.ws.rs.core.MultivaluedHashM
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.RuntimeDelegate;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.core.serializedform.Parser;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.ontologies.RDF;
@@ -91,7 +91,7 @@ public class ContentItemReaderWriterTest
*/
@BeforeClass
public static void createTestContentItem() throws IOException {
- contentItem = ciFactory.createContentItem(new UriRef("urn:test"),
+ contentItem = ciFactory.createContentItem(new IRI("urn:test"),
new StringSource(
"<html>\n" +
" <body>\n" +
@@ -99,11 +99,11 @@ public class ContentItemReaderWriterTest
" </body>\n" +
"</html>","text/html"));
RuntimeDelegate.setInstance(new RuntimeDelegateImpl());
- contentItem.addPart(new UriRef("run:text:text"),
+ contentItem.addPart(new IRI("run:text:text"),
ciFactory.createBlob(new StringSource(
"This is a ContentItem to Mime Multipart test!")));
contentItem.getMetadata().add(new TripleImpl(
- new UriRef("urn:test"), RDF.type, new UriRef("urn:types:Document")));
+ new IRI("urn:test"), RDF.type, new IRI("urn:types:Document")));
//mark the main content as parsed and also that all
//contents and contentparts should be included
Map<String,Object> properties = initRequestPropertiesContentPart(contentItem);
@@ -111,8 +111,8 @@ public class ContentItemReaderWriterTest
properties.put(OUTPUT_CONTENT, Collections.singleton("*/*"));
properties.put(OUTPUT_CONTENT_PART, Collections.singleton("*"));
properties.put(RDF_FORMAT, "application/rdf+xml");
- MGraph em = initExecutionMetadataContentPart(contentItem);
- NonLiteral ep = createExecutionPlan(em, "testChain",null);
+ Graph em = initExecutionMetadataContentPart(contentItem);
+ BlankNodeOrIRI ep = createExecutionPlan(em, "testChain",null);
writeExecutionNode(em, ep, "testEngine", true, null,null);
initExecutionMetadata(em, em, contentItem.getUri(), "testChain", false);
@@ -201,7 +201,7 @@ public class ContentItemReaderWriterTest
//assert ID
assertEquals(contentItem.getUri(), ci.getUri());
//assert metadata
- MGraph copy = new SimpleMGraph();
+ Graph copy = new SimpleGraph();
copy.addAll(contentItem.getMetadata());
assertTrue(copy.removeAll(ci.getMetadata()));
assertTrue(copy.isEmpty());
@@ -210,12 +210,12 @@ public class ContentItemReaderWriterTest
String content = IOUtils.toString(contentItem.getStream(),"UTF-8");
String readContent = IOUtils.toString(ci.getStream(), "UTF-8");
assertEquals(content, readContent);
- Iterator<Entry<UriRef,Blob>> contentItemBlobsIt = ContentItemHelper.getContentParts(contentItem, Blob.class).entrySet().iterator();
- Iterator<Entry<UriRef,Blob>> ciBlobsIt = ContentItemHelper.getContentParts(ci, Blob.class).entrySet().iterator();
+ Iterator<Entry<IRI,Blob>> contentItemBlobsIt = ContentItemHelper.getContentParts(contentItem, Blob.class).entrySet().iterator();
+ Iterator<Entry<IRI,Blob>> ciBlobsIt = ContentItemHelper.getContentParts(ci, Blob.class).entrySet().iterator();
Set<String> expectedParsedContentIds = new HashSet<String>(); //later used to validate enhancementMetadata
while(contentItemBlobsIt.hasNext() && ciBlobsIt.hasNext()){
- Entry<UriRef,Blob> contentItemBlobPart = contentItemBlobsIt.next();
- Entry<UriRef,Blob> ciBlobPart = ciBlobsIt.next();
+ Entry<IRI,Blob> contentItemBlobPart = contentItemBlobsIt.next();
+ Entry<IRI,Blob> ciBlobPart = ciBlobsIt.next();
expectedParsedContentIds.add(ciBlobPart.getKey().getUnicodeString());
assertEquals(contentItemBlobPart.getKey(), ciBlobPart.getKey());
String partContentType = contentItemBlobPart.getValue().getMimeType();
@@ -226,8 +226,8 @@ public class ContentItemReaderWriterTest
assertEquals(partContent, readPartContent);
}
//validate ExecutionMetadata
- MGraph executionMetadata = contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
- MGraph readExecutionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+ Graph executionMetadata = contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
+ Graph readExecutionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
assertNotNull(executionMetadata);
assertNotNull(readExecutionMetadata);
assertEquals(executionMetadata.size(), readExecutionMetadata.size());
Modified: stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java (original)
+++ stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java Tue May 17 22:20:49 2016
@@ -16,7 +16,7 @@
*/
package org.apache.stanbol.enhancer.jobmanager.event;
-import org.apache.clerezza.rdf.core.NonLiteral;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.stanbol.enhancer.jobmanager.event.impl.EnhancementJob;
import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;
import org.osgi.service.event.Event;
@@ -41,7 +41,7 @@ public interface Constants {
*/
String PROPERTY_JOB_MANAGER = "stanbol.enhancer.jobmanager.event.job";
/**
- * Property used to provide the {@link NonLiteral} describing the
+ * Property used to provide the {@link BlankNodeOrIRI} describing the
* {@link ExecutionMetadata#EXECUTION} instance
*/
String PROPERTY_EXECUTION = "stanbol.enhancer.jobmanager.event.execution";
Modified: stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java (original)
+++ stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java Tue May 17 22:20:49 2016
@@ -43,10 +43,10 @@ import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
import org.apache.commons.collections.BidiMap;
import org.apache.commons.collections.bidimap.DualHashBidiMap;
import org.apache.stanbol.enhancer.servicesapi.Chain;
@@ -88,13 +88,13 @@ public class EnhancementJob {
/**
* The read only executionPlan
*/
- private final Graph executionPlan;
+ private final ImmutableGraph executionPlan;
/**
* The read/write able execution metadata. Also accessible via
- * {@link ContentItem#getPart(UriRef, Class)} with the URI
+ * {@link ContentItem#getPart(IRI, Class)} with the URI
* {@link ExecutionMetadata#CHAIN_EXECUTION}
*/
- private final MGraph executionMetadata;
+ private final Graph executionMetadata;
/**
* Map with the em:Execution nodes of the em:ChainExecution for this
* ContentItem. Values are are ep:ExecutionNodes of the ep:ExecutionPlan
@@ -103,11 +103,11 @@ public class EnhancementJob {
/**
* The em:ChainExecution for this {@link ContentItem}
*/
- private final NonLiteral chainExecutionNode;
+ private final BlankNodeOrIRI chainExecutionNode;
/**
* The ep:ExecutionPlan for this {@link ContentItem}
*/
- private final NonLiteral executionPlanNode;
+ private final BlankNodeOrIRI executionPlanNode;
/**
* The name of the {@link Chain} used to enhance this {@link ContentItem}.
*/
@@ -121,32 +121,32 @@ public class EnhancementJob {
* The completed ep:ExecutionPlan nodes. <p>
* NOTE: This contains ep:ExecutionNodes and NOT em:Exetution instances!
*/
- private final Set<NonLiteral> completed = new HashSet<NonLiteral>();
+ private final Set<BlankNodeOrIRI> completed = new HashSet<BlankNodeOrIRI>();
/**
* Unmodifiable and final set of completed executables. Replaced by a new
* instance every time {@link #completed} changes
*/
- private Set<NonLiteral> completedExec = Collections.emptySet();
+ private Set<BlankNodeOrIRI> completedExec = Collections.emptySet();
/**
* The running ep:ExecutionPlan nodes <p>
* NOTE: This contains ep:ExecutionNodes and NOT em:Exetution instances!
*/
- private final Set<NonLiteral> running = new HashSet<NonLiteral>();
+ private final Set<BlankNodeOrIRI> running = new HashSet<BlankNodeOrIRI>();
/**
* Unmodifiable and final set of running executables. Replaced by a new
* instance every time {@link #running} changes.
*/
- private Set<NonLiteral> runningExec = Collections.emptySet();
+ private Set<BlankNodeOrIRI> runningExec = Collections.emptySet();
/**
* Unmodifiable and final set of executable em:Execution nodes.
* Replaced by a new instance every time {@link #running} or
* {@link #completed} changes.
*/
- private Set<NonLiteral> executable;
+ private Set<BlankNodeOrIRI> executable;
/**
* Used to store any {@link Exception} parsed with the call to
- * {@link #setFailed(NonLiteral, EnhancementEngine, Exception)} causing the
+ * {@link #setFailed(BlankNodeOrIRI, EnhancementEngine, Exception)} causing the
* enhancement process to fail. This Exception is typically re-thrown by the
* {@link EnhancementJobManager#enhanceContent(ContentItem, Chain)} method.
* @see #getError()
@@ -162,7 +162,7 @@ public class EnhancementJob {
* @param executionPlan
* @param isDefaultChain
*/
- public EnhancementJob(ContentItem contentItem, String chainName, Graph executionPlan, boolean isDefaultChain) {
+ public EnhancementJob(ContentItem contentItem, String chainName, ImmutableGraph executionPlan, boolean isDefaultChain) {
if (contentItem == null || chainName == null || executionPlan == null) {
throw new IllegalArgumentException("The parsed contentItem and executionPlan MUST NOT be NULL");
}
@@ -191,7 +191,7 @@ public class EnhancementJob {
* Creates an EnhancemenJob based on already existing execution metadata present
* for a ContentItem.
* @param contentItem the ContentItem with an already existing content part
- * containing an {@link MGraph} with all required execution metadata and the
+ * containing an {@link Graph} with all required execution metadata and the
* execution plan.
* @throws IllegalArgumentException if the parsed {@link ContentItem} does
* not provide the required data to (re)initialise the EnhancementJob.
@@ -204,13 +204,13 @@ public class EnhancementJob {
this.readLock = contentItem.getLock().readLock();
this.writeLock = contentItem.getLock().writeLock();
try {
- contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+ contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
} catch (NoSuchPartException e) {
throw new IllegalArgumentException("Cannot (re)initialise an EnhancementJob" +
"without existing execution metadata content part!",e);
}
executionMetadata = initExecutionMetadataContentPart(contentItem);
- this.executionPlan = executionMetadata.getGraph();
+ this.executionPlan = executionMetadata.getImmutableGraph();
chainExecutionNode = getChainExecution(executionMetadata, contentItem.getUri());
if(chainExecutionNode == null){
throw new IllegalArgumentException("Cannot (re)initialise an EnhancementJob" +
@@ -231,10 +231,10 @@ public class EnhancementJob {
"enhance ContentItem '"+contentItem.getUri()+"'!");
}
//the executionPlan is part of the execution metadata
- Map<NonLiteral,NonLiteral> executionsMap = initExecutionMetadata(executionMetadata,
+ Map<BlankNodeOrIRI,BlankNodeOrIRI> executionsMap = initExecutionMetadata(executionMetadata,
executionPlan, contentItem.getUri(), null, null);
- for(Entry<NonLiteral,NonLiteral> executionEntry : executionsMap.entrySet()){
- UriRef status = getReference(executionMetadata, executionEntry.getKey(), STATUS);
+ for(Entry<BlankNodeOrIRI,BlankNodeOrIRI> executionEntry : executionsMap.entrySet()){
+ IRI status = getReference(executionMetadata, executionEntry.getKey(), STATUS);
if(status == null){
throw new IllegalArgumentException("The ex:Execution '"
+ executionEntry.getKey()+"' of the ex:ChainExecution for ContentItme '"
@@ -260,8 +260,8 @@ public class EnhancementJob {
* @throws IllegalArgumentException if the parsed em:Execution is not
* part of the execution metadata of this enhancement job
*/
- public NonLiteral getExecutionNode(NonLiteral execution){
- NonLiteral node = (NonLiteral)executionsMap.get(execution);
+ public BlankNodeOrIRI getExecutionNode(BlankNodeOrIRI execution){
+ BlankNodeOrIRI node = (BlankNodeOrIRI)executionsMap.get(execution);
if(node == null){
throw new IllegalArgumentException("Unknown sp:ExecutionNode instance "+node);
}
@@ -273,8 +273,8 @@ public class EnhancementJob {
* @throws IllegalArgumentException if the parsed ep:ExecutionNode is not
* part of the execution plan of this enhancement job
*/
- public NonLiteral getExecution(NonLiteral executionNode){
- NonLiteral execution = (NonLiteral)executionsMap.getKey(executionNode);
+ public BlankNodeOrIRI getExecution(BlankNodeOrIRI executionNode){
+ BlankNodeOrIRI execution = (BlankNodeOrIRI)executionsMap.getKey(executionNode);
if(execution == null){
throw new IllegalArgumentException("Unknown em:Execution instance "+executionNode);
}
@@ -286,7 +286,7 @@ public class EnhancementJob {
*
* @return the executionPlan
*/
- public final Graph getExecutionPlan() {
+ public final ImmutableGraph getExecutionPlan() {
return executionPlan;
}
@@ -329,7 +329,7 @@ public class EnhancementJob {
*
* @return the currently running executions.
*/
- public Set<NonLiteral> getRunning() {
+ public Set<BlankNodeOrIRI> getRunning() {
log.trace("++ r: {}","getRunning");
readLock.lock();
try {
@@ -346,7 +346,7 @@ public class EnhancementJob {
*
* @return the completed execution nodes
*/
- public Set<NonLiteral> getCompleted() {
+ public Set<BlankNodeOrIRI> getCompleted() {
log.trace("++ r: {}","getCompleted");
readLock.lock();
try {
@@ -371,12 +371,12 @@ public class EnhancementJob {
* if the parsed execution node can not be marked as completed because some of its
* depended nodes are not yet marked as completed.
*/
- public void setCompleted(NonLiteral execution) {
+ public void setCompleted(BlankNodeOrIRI execution) {
if(execution == null) {
throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
}
writeLock.lock();
- NonLiteral executionNode = getExecutionNode(execution);
+ BlankNodeOrIRI executionNode = getExecutionNode(execution);
log.trace("++ w: {}: {}","setCompleted",getEngine(executionPlan, executionNode));
try {
log.trace(">> w: {}: {}","setCompleted",getEngine(executionPlan, executionNode));
@@ -390,16 +390,16 @@ public class EnhancementJob {
/**
* Internally used to update the state kept in {@link #completed} and
* {@link #running} and {@link #executable} after an execution was set to
- * {@link #setCompleted(NonLiteral) completed} or
- * {@link #setFailed(NonLiteral, EnhancementEngine, Exception) failed}.<p>
+ * {@link #setCompleted(BlankNodeOrIRI) completed} or
+ * {@link #setFailed(BlankNodeOrIRI, EnhancementEngine, Exception) failed}.<p>
* This method expects to be called within an active {@link #writeLock}.
* @param executionNode the ep:ExecutionNode linked to the em:Execution that
* finished.
*/
- private void setNodeCompleted(NonLiteral executionNode) {
+ private void setNodeCompleted(BlankNodeOrIRI executionNode) {
String engine = getEngine(executionPlan, executionNode);
boolean optional = isOptional(executionPlan, executionNode);
- Set<NonLiteral> dependsOn = getDependend(executionPlan, executionNode);
+ Set<BlankNodeOrIRI> dependsOn = getDependend(executionPlan, executionNode);
if (completed.contains(executionNode)) {
log.warn("Execution of Engine '{}' for ContentItem {} already "
+ "marked as completed(chain: {}, node: {}, optional {})."
@@ -447,14 +447,14 @@ public class EnhancementJob {
* if the parsed execution node can not be marked as running because some of its depended
* nodes are not yet marked as completed.
*/
- public void setRunning(NonLiteral execution) {
+ public void setRunning(BlankNodeOrIRI execution) {
if(execution == null) {
throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
}
- NonLiteral executionNode = getExecutionNode(execution);
+ BlankNodeOrIRI executionNode = getExecutionNode(execution);
String engine = getEngine(executionPlan, executionNode);
boolean optional = isOptional(executionPlan, executionNode);
- Set<NonLiteral> dependsOn = getDependend(executionPlan, executionNode);
+ Set<BlankNodeOrIRI> dependsOn = getDependend(executionPlan, executionNode);
log.trace("++ w: {}: {}","setRunning",ExecutionPlanHelper.getEngine(executionPlan, executionNode));
writeLock.lock();
try {
@@ -510,8 +510,8 @@ public class EnhancementJob {
* updates the {@link #runningExec} based on {@link #running}
*/
private void updateRunningExec() {
- Set<NonLiteral> runningExec = new HashSet<NonLiteral>(running.size());
- for(NonLiteral node : running){
+ Set<BlankNodeOrIRI> runningExec = new HashSet<BlankNodeOrIRI>(running.size());
+ for(BlankNodeOrIRI node : running){
runningExec.add(getExecution(node));
}
this.runningExec = Collections.unmodifiableSet(runningExec);
@@ -520,8 +520,8 @@ public class EnhancementJob {
* updates the {@link #runningExec} based on {@link #running}
*/
private void updateCompletedExec() {
- Set<NonLiteral> completedExec = new HashSet<NonLiteral>(completed.size());
- for(NonLiteral node : completed){
+ Set<BlankNodeOrIRI> completedExec = new HashSet<BlankNodeOrIRI>(completed.size());
+ for(BlankNodeOrIRI node : completed){
completedExec.add(getExecution(node));
}
this.completedExec = Collections.unmodifiableSet(completedExec);
@@ -531,7 +531,7 @@ public class EnhancementJob {
* Assumed to be called within a write lock!
*/
private void checkExecutable(){
- Set<NonLiteral> executeableNodes =
+ Set<BlankNodeOrIRI> executeableNodes =
ExecutionPlanHelper.getExecutable(executionPlan, completed);
//a Chain finishes if no engine is running and no more nodes are executable
if(!ExecutionMetadata.STATUS_FAILED.equals(
@@ -539,7 +539,7 @@ public class EnhancementJob {
executeableNodes.removeAll(running);
if(log.isDebugEnabled()){
Collection<String> engines = new ArrayList<String>(executeableNodes.size());
- for(NonLiteral node : executeableNodes){
+ for(BlankNodeOrIRI node : executeableNodes){
engines.add(getEngine(executionPlan, node));
}
log.trace("MARK {} as executeable",engines);
@@ -550,8 +550,8 @@ public class EnhancementJob {
} else if( executeableNodes.size() == 1){
this.executable = Collections.singleton(getExecution(executeableNodes.iterator().next()));
} else {
- Set<NonLiteral> executable = new HashSet<NonLiteral>(executeableNodes.size());
- for(NonLiteral exeutableNode : executeableNodes){
+ Set<BlankNodeOrIRI> executable = new HashSet<BlankNodeOrIRI>(executeableNodes.size());
+ for(BlankNodeOrIRI exeutableNode : executeableNodes){
executable.add(getExecution(exeutableNode));
}
this.executable = Collections.unmodifiableSet(executable);
@@ -570,7 +570,7 @@ public class EnhancementJob {
* @return the nodes that can be executed next based on the completed and
* currently running engines.
*/
- public Set<NonLiteral> getExecutable(){
+ public Set<BlankNodeOrIRI> getExecutable(){
log.trace("++ r: {}","getExecutable");
readLock.lock();
log.trace(">> r: {}","getExecutable");
@@ -598,11 +598,11 @@ public class EnhancementJob {
}
}
- public void setFailed(NonLiteral execution, EnhancementEngine engine, Exception exception) {
+ public void setFailed(BlankNodeOrIRI execution, EnhancementEngine engine, Exception exception) {
if(execution == null) {
throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
}
- NonLiteral executionNode = getExecutionNode(execution);
+ BlankNodeOrIRI executionNode = getExecutionNode(execution);
final boolean optional = isOptional(executionPlan, executionNode);
final String engineName = getEngine(executionPlan, executionNode);
log.trace("++ w: {}: {}","setFailed",ExecutionPlanHelper.getEngine(executionPlan, executionNode));
@@ -696,7 +696,7 @@ public class EnhancementJob {
* Getter for the ExecutionMetadata.
* @return the execution metadata.
*/
- public MGraph getExecutionMetadata() {
+ public Graph getExecutionMetadata() {
return executionMetadata;
}
/**
Modified: stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java (original)
+++ stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java Tue May 17 22:20:49 2016
@@ -35,9 +35,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.EngineException;
import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -189,7 +188,7 @@ public class EnhancementJobHandler imple
@Override
public void handleEvent(Event event) {
EnhancementJob job = (EnhancementJob)event.getProperty(PROPERTY_JOB_MANAGER);
- NonLiteral execution = (NonLiteral)event.getProperty(PROPERTY_EXECUTION);
+ BlankNodeOrIRI execution = (BlankNodeOrIRI)event.getProperty(PROPERTY_EXECUTION);
if(job == null || execution == null){
log.warn("Unable to process EnhancementEvent where EnhancementJob " +
"{} or Execution node {} is null -> ignore",job,execution);
@@ -222,7 +221,7 @@ public class EnhancementJobHandler imple
} else {
if(log.isInfoEnabled()){
Collection<String> running = new ArrayList<String>(3);
- for(NonLiteral runningNode : job.getRunning()){
+ for(BlankNodeOrIRI runningNode : job.getRunning()){
running.add(getEngine(job.getExecutionPlan(), job.getExecutionNode(runningNode)));
}
log.info("Job {} failed, but {} still running!",
@@ -238,7 +237,7 @@ public class EnhancementJobHandler imple
* @param job
* @param execution
*/
- private void processEvent(EnhancementJob job, NonLiteral execution) {
+ private void processEvent(EnhancementJob job, BlankNodeOrIRI execution) {
String engineName = getEngine(job.getExecutionPlan(),
job.getExecutionNode(execution));
//(1) execute the parsed ExecutionNode
@@ -340,7 +339,7 @@ public class EnhancementJobHandler imple
protected boolean executeNextNodes(EnhancementJob job) {
//getExecutable returns an snapshot so we do not need to lock
boolean startedExecution = false;
- for(NonLiteral executable : job.getExecutable()){
+ for(BlankNodeOrIRI executable : job.getExecutable()){
if(log.isTraceEnabled()){
log.trace("PREPARE execution of Engine {}",
getEngine(job.getExecutionPlan(), job.getExecutionNode(executable)));
@@ -425,15 +424,15 @@ public class EnhancementJobHandler imple
log.info(" content-item: {}", job.getContentItem().getUri());
if(logExecutions){
log.info(" executions:");
- for(NonLiteral completedExec : job.getCompleted()){
+ for(BlankNodeOrIRI completedExec : job.getCompleted()){
log.info(" - {} completed",getEngine(job.getExecutionMetadata(),
job.getExecutionNode(completedExec)));
}
- for(NonLiteral runningExec : job.getRunning()){
+ for(BlankNodeOrIRI runningExec : job.getRunning()){
log.info(" - {} running",getEngine(job.getExecutionMetadata(),
job.getExecutionNode(runningExec)));
}
- for(NonLiteral executeable : job.getExecutable()){
+ for(BlankNodeOrIRI executeable : job.getExecutable()){
log.info(" - {} executeable",getEngine(job.getExecutionMetadata(),
job.getExecutionNode(executeable)));
}
Modified: stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java (original)
+++ stanbol/trunk/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java Tue May 17 22:20:49 2016
@@ -26,8 +26,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Triple;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.apache.felix.scr.annotations.Activate;
@@ -270,7 +270,7 @@ public class EventJobManagerImpl impleme
throw new IllegalStateException("Currently no enhancement chain is " +
"active. Please configure a Chain or enable the default chain");
}
- Graph ep;
+ ImmutableGraph ep;
try {
ep = defaultChain.getExecutionPlan();
} catch (ChainException e) {
Modified: stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java (original)
+++ stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java Tue May 17 22:20:49 2016
@@ -26,7 +26,7 @@ import static org.apache.stanbol.enhance
import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_TEXTANNOTATION;
import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_TOPICANNOTATION;
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.marmotta.ldpath.api.functions.SelectorFunction;
import org.apache.marmotta.ldpath.api.selectors.NodeSelector;
import org.apache.marmotta.ldpath.model.Constants;
@@ -46,7 +46,7 @@ public final class EnhancerLDPath {
private EnhancerLDPath(){}
- private static Configuration<Resource> CONFIG;
+ private static Configuration<RDFTerm> CONFIG;
/**
* The LDPath configuration including the <ul>
@@ -55,9 +55,9 @@ public final class EnhancerLDPath {
* </ul>
* @return the LDPath configuration for the Stanbol Enhancer
*/
- public static final Configuration<Resource> getConfig(){
+ public static final Configuration<RDFTerm> getConfig(){
if(CONFIG == null){
- CONFIG = new DefaultConfiguration<Resource>();
+ CONFIG = new DefaultConfiguration<RDFTerm>();
//add the namespaces
for(NamespaceEnum ns : NamespaceEnum.values()){
CONFIG.addNamespace(ns.getPrefix(), ns.getNamespace());
@@ -65,7 +65,7 @@ public final class EnhancerLDPath {
//now add the functions
addFunction(CONFIG, new ContentFunction());
String path;
- NodeSelector<Resource> selector;
+ NodeSelector<RDFTerm> selector;
//TextAnnotations
path = String.format("^%s[%s is %s]",
ENHANCER_EXTRACTED_FROM,RDF_TYPE,ENHANCER_TEXTANNOTATION);
@@ -75,7 +75,7 @@ public final class EnhancerLDPath {
throw new IllegalStateException("Unable to parse the ld-path selector '" +
path + "'used to select all TextAnnotations of a contentItem!", e);
}
- addFunction(CONFIG, new PathFunction<Resource>(
+ addFunction(CONFIG, new PathFunction<RDFTerm>(
"textAnnotation",selector));
//EntityAnnotations
@@ -87,7 +87,7 @@ public final class EnhancerLDPath {
throw new IllegalStateException("Unable to parse the ld-path selector '" +
path + "'used to select all EntityAnnotations of a contentItem!", e);
}
- addFunction(CONFIG,new PathFunction<Resource>(
+ addFunction(CONFIG,new PathFunction<RDFTerm>(
"entityAnnotation", selector));
//TopicAnnotations
@@ -99,7 +99,7 @@ public final class EnhancerLDPath {
throw new IllegalStateException("Unable to parse the ld-path selector '" +
path + "'used to select all TopicAnnotations of a contentItem!", e);
}
- addFunction(CONFIG,new PathFunction<Resource>(
+ addFunction(CONFIG,new PathFunction<RDFTerm>(
"topicAnnotation",selector));
//Enhancements
path = String.format("^%s[%s is %s]",
@@ -110,13 +110,13 @@ public final class EnhancerLDPath {
throw new IllegalStateException("Unable to parse the ld-path selector '" +
path + "'used to select all Enhancements of a contentItem!", e);
}
- addFunction(CONFIG,new PathFunction<Resource>(
+ addFunction(CONFIG,new PathFunction<RDFTerm>(
"enhancement",selector));
//Suggested EntityAnnotations for Text/TopicAnnotations
//(1) to select the suggestions
- NodeSelector<Resource> linkedEntityAnnotations;
+ NodeSelector<RDFTerm> linkedEntityAnnotations;
path = String.format("^%s[%s is %s]",
DC_RELATION,RDF_TYPE,ENHANCER_ENTITYANNOTATION,ENHANCER_CONFIDENCE);
try {
@@ -126,7 +126,7 @@ public final class EnhancerLDPath {
path + "'used to select all entity suggestions for an Enhancement!", e);
}
//(2) to select the confidence value of Enhancements
- NodeSelector<Resource> confidenceSelector;
+ NodeSelector<RDFTerm> confidenceSelector;
path = ENHANCER_CONFIDENCE.toString();
try {
confidenceSelector = Utils.parseSelector(path);
@@ -141,7 +141,7 @@ public final class EnhancerLDPath {
//The suggestion and confidence selectors can be the same as above,
//but we need an additional result selector
- NodeSelector<Resource> entityReferenceSelector;
+ NodeSelector<RDFTerm> entityReferenceSelector;
path = ENHANCER_ENTITY_REFERENCE.toString();
try {
entityReferenceSelector = Utils.parseSelector(path);
Modified: stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java (original)
+++ stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java Tue May 17 22:20:49 2016
@@ -32,11 +32,11 @@ import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.locks.Lock;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.utils.UnionMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.rdf.utils.UnionGraph;
import org.apache.marmotta.ldpath.api.backend.RDFBackend;
import org.apache.stanbol.commons.ldpath.clerezza.ClerezzaBackend;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -49,16 +49,16 @@ import org.slf4j.LoggerFactory;
* @author Rupert Westenthaler
*
*/
-public class ContentItemBackend implements RDFBackend<Resource>{
+public class ContentItemBackend implements RDFBackend<RDFTerm>{
private final Logger log = LoggerFactory.getLogger(ContentItemBackend.class);
- private static final Map<UriRef,TripleCollection> EMPTY_INCLUDED = emptyMap();
+ private static final Map<IRI,Graph> EMPTY_INCLUDED = emptyMap();
private final ContentItem ci;
private final Lock readLock;
private final ClerezzaBackend backend;
- private final Map<UriRef,TripleCollection> included;
+ private final Map<IRI,Graph> included;
/**
* Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
@@ -71,23 +71,23 @@ public class ContentItemBackend implemen
/**
* Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
* metadata} and all {@link ContentItem#getPart(int, Class) content parts}
- * compatible to {@link TripleCollection}
+ * compatible to {@link Graph}
* @param ci the content item
* @param includeAdditionalMetadata if <code>true</code> the {@link RDFBackend}
* will also include RDF data stored in content parts
*/
public ContentItemBackend(ContentItem ci, boolean includeAdditionalMetadata){
included = includeAdditionalMetadata ?
- unmodifiableMap(getContentParts(ci, TripleCollection.class)) :
+ unmodifiableMap(getContentParts(ci, Graph.class)) :
EMPTY_INCLUDED;
- MGraph graph;
+ Graph graph;
if(included.isEmpty()){
graph = ci.getMetadata();
} else {
- TripleCollection[] tcs = new TripleCollection[included.size()+1];
+ Graph[] tcs = new Graph[included.size()+1];
tcs[0] = ci.getMetadata();
System.arraycopy(included.values().toArray(), 0, tcs, 1, included.size());
- graph = new UnionMGraph(tcs);
+ graph = new UnionGraph(tcs);
}
backend = new ClerezzaBackend(graph);
this.ci = ci;
@@ -97,15 +97,15 @@ public class ContentItemBackend implemen
* Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
* metadata} and RDF data stored in content parts with the parsed URIs.
* If no content part for a parsed URI exists or its type is not compatible
- * to {@link TripleCollection} it will be not included.
+ * to {@link Graph} it will be not included.
* @param ci the content item
* @param includedMetadata the URIs for the content parts to include
*/
- public ContentItemBackend(ContentItem ci, Set<UriRef> includedMetadata){
- Map<UriRef,TripleCollection> included = new LinkedHashMap<UriRef,TripleCollection>();
- for(UriRef ref : includedMetadata){
+ public ContentItemBackend(ContentItem ci, Set<IRI> includedMetadata){
+ Map<IRI,Graph> included = new LinkedHashMap<IRI,Graph>();
+ for(IRI ref : includedMetadata){
try {
- TripleCollection metadata = ci.getPart(ref, TripleCollection.class);
+ Graph metadata = ci.getPart(ref, Graph.class);
included.put(ref, metadata);
} catch (RuntimeException e) {
log.warn("Unable to add requested Metadata-ContentPart "+ref+" to" +
@@ -113,14 +113,14 @@ public class ContentItemBackend implemen
}
}
this.included = unmodifiableMap(included);
- MGraph graph;
+ Graph graph;
if(included.isEmpty()){
graph = ci.getMetadata();
} else {
- TripleCollection[] tcs = new TripleCollection[included.size()+1];
+ Graph[] tcs = new Graph[included.size()+1];
tcs[0] = ci.getMetadata();
System.arraycopy(included.values().toArray(), 0, tcs, 1, included.size());
- graph = new UnionMGraph(tcs);
+ graph = new UnionGraph(tcs);
}
backend = new ClerezzaBackend(graph);
this.ci = ci;
@@ -129,7 +129,7 @@ public class ContentItemBackend implemen
@Override
- public Collection<Resource> listObjects(Resource subject, Resource property) {
+ public Collection<RDFTerm> listObjects(RDFTerm subject, RDFTerm property) {
readLock.lock();
try {
return backend.listObjects(subject, property);
@@ -139,7 +139,7 @@ public class ContentItemBackend implemen
}
@Override
- public Collection<Resource> listSubjects(Resource property, Resource object) {
+ public Collection<RDFTerm> listSubjects(RDFTerm property, RDFTerm object) {
readLock.lock();
try {
return backend.listSubjects(property, object);
@@ -159,84 +159,84 @@ public class ContentItemBackend implemen
* RDF backend
* @return the content parts included in this {@link RDFBackend}
*/
- public Map<UriRef,TripleCollection> getIncludedMetadata(){
+ public Map<IRI,Graph> getIncludedMetadata(){
return included;
}
@Override
- public boolean isLiteral(Resource n) {
+ public boolean isLiteral(RDFTerm n) {
return backend.isLiteral(n);
}
@Override
- public boolean isURI(Resource n) {
+ public boolean isURI(RDFTerm n) {
return backend.isURI(n);
}
@Override
- public boolean isBlank(Resource n) {
+ public boolean isBlank(RDFTerm n) {
return backend.isBlank(n);
}
@Override
- public Locale getLiteralLanguage(Resource n) {
+ public Locale getLiteralLanguage(RDFTerm n) {
return backend.getLiteralLanguage(n);
}
@Override
- public URI getLiteralType(Resource n) {
+ public URI getLiteralType(RDFTerm n) {
return backend.getLiteralType(n);
}
@Override
- public Resource createLiteral(String content) {
+ public RDFTerm createLiteral(String content) {
return backend.createLiteral(content);
}
@Override
- public Resource createLiteral(String content, Locale language, URI type) {
+ public RDFTerm createLiteral(String content, Locale language, URI type) {
return backend.createLiteral(content, language, type);
}
@Override
- public Resource createURI(String uri) {
+ public RDFTerm createURI(String uri) {
return backend.createURI(uri);
}
@Override
- public String stringValue(Resource node) {
+ public String stringValue(RDFTerm node) {
return backend.stringValue(node);
}
@Override
- public Double doubleValue(Resource node) {
+ public Double doubleValue(RDFTerm node) {
return backend.doubleValue(node);
}
@Override
- public Long longValue(Resource node) {
+ public Long longValue(RDFTerm node) {
return backend.longValue(node);
}
@Override
- public Boolean booleanValue(Resource node) {
+ public Boolean booleanValue(RDFTerm node) {
return backend.booleanValue(node);
}
@Override
- public Date dateTimeValue(Resource node) {
+ public Date dateTimeValue(RDFTerm node) {
return backend.dateTimeValue(node);
}
@Override
- public Date dateValue(Resource node) {
+ public Date dateValue(RDFTerm node) {
return backend.dateValue(node);
}
@Override
- public Date timeValue(Resource node) {
+ public Date timeValue(RDFTerm node) {
return backend.timeValue(node);
}
@Override
- public Float floatValue(Resource node) {
+ public Float floatValue(RDFTerm node) {
return backend.floatValue(node);
}
@Override
- public Integer intValue(Resource node) {
+ public Integer intValue(RDFTerm node) {
return backend.intValue(node);
}
@Override
- public BigInteger integerValue(Resource node) {
+ public BigInteger integerValue(RDFTerm node) {
return backend.integerValue(node);
}
@Override
- public BigDecimal decimalValue(Resource node) {
+ public BigDecimal decimalValue(RDFTerm node) {
return backend.decimalValue(node);
}
Modified: stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java
URL: http://svn.apache.org/viewvc/stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java?rev=1744328&r1=1744327&r2=1744328&view=diff
==============================================================================
--- stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java (original)
+++ stanbol/trunk/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java Tue May 17 22:20:49 2016
@@ -26,8 +26,8 @@ import java.util.Map.Entry;
import java.util.Set;
import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
import org.apache.commons.io.IOUtils;
import org.apache.marmotta.ldpath.api.backend.RDFBackend;
import org.apache.marmotta.ldpath.util.Collections;
@@ -55,15 +55,15 @@ public class ContentFunction extends Con
}
@Override
- public Collection<Resource> apply(ContentItemBackend backend, Resource context, Collection<Resource>... args) throws IllegalArgumentException {
+ public Collection<RDFTerm> apply(ContentItemBackend backend, RDFTerm context, Collection<RDFTerm>... args) throws IllegalArgumentException {
ContentItem ci = backend.getContentItem();
Set<String> mimeTypes;
if(args == null || args.length < 1){
mimeTypes = null;
} else {
mimeTypes = new HashSet<String>();
- for(Iterator<Resource> params = Collections.concat(args).iterator();params.hasNext();){
- Resource param = params.next();
+ for(Iterator<RDFTerm> params = Collections.concat(args).iterator();params.hasNext();){
+ RDFTerm param = params.next();
String mediaTypeString = backend.stringValue(param);
try {
mimeTypes.add(parseMimeType(mediaTypeString).get(null));
@@ -73,12 +73,12 @@ public class ContentFunction extends Con
}
}
}
- Collection<Resource> result;
+ Collection<RDFTerm> result;
Blob blob;
if(mimeTypes == null || mimeTypes.isEmpty()){
blob = ci.getBlob();
} else {
- Entry<UriRef,Blob> entry = ContentItemHelper.getBlob(ci, mimeTypes);
+ Entry<IRI,Blob> entry = ContentItemHelper.getBlob(ci, mimeTypes);
blob = entry != null ? entry.getValue() : null;
}
if(blob == null){
@@ -92,7 +92,7 @@ public class ContentFunction extends Con
} else { //binary content
byte[] data = IOUtils.toByteArray(blob.getStream());
result = java.util.Collections.singleton(
- (Resource)lf.createTypedLiteral(data));
+ (RDFTerm)lf.createTypedLiteral(data));
}
} catch (IOException e) {
throw new IllegalStateException("Unable to read contents from Blob '"