Posted to commits@marmotta.apache.org by an...@apache.org on 2013/06/25 03:27:23 UTC

git commit: MARMOTTA-261 : Reuse RDFJSON parser and writer

Updated Branches:
  refs/heads/develop acfd008a6 -> b8b062ca3


MARMOTTA-261 : Reuse RDFJSON parser and writer

Project: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/commit/b8b062ca
Tree: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/tree/b8b062ca
Diff: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/diff/b8b062ca

Branch: refs/heads/develop
Commit: b8b062ca3140e9dff3708bd62f9e575728a4b9df
Parents: acfd008
Author: Peter Ansell <p_...@yahoo.com>
Authored: Tue Jun 25 11:24:53 2013 +1000
Committer: Peter Ansell <p_...@yahoo.com>
Committed: Tue Jun 25 11:24:53 2013 +1000

----------------------------------------------------------------------
 client/marmotta-client-java/pom.xml             |   4 +
 .../marmotta/client/util/RDFJSONParser.java     | 142 ++++++++++++-------
 2 files changed, 92 insertions(+), 54 deletions(-)
----------------------------------------------------------------------

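For context, this change swaps the hand-rolled Jackson mapping of the RDF/JSON structure for Sesame's Rio parser and writer. A minimal, self-contained sketch of the Rio calls the refactored code relies on (the JSON snippet and class name below are illustrative, not part of the commit):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    import org.openrdf.model.Model;
    import org.openrdf.rio.RDFFormat;
    import org.openrdf.rio.Rio;

    // Sketch: round-trip RDF/JSON through Sesame Rio, the approach the
    // refactored RDFJSONParser delegates to.
    public class RioRdfJsonSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"http://example.org/s\":{\"http://example.org/p\":"
                        + "[{\"type\":\"literal\",\"value\":\"hello\"}]}}";

            // parse RDF/JSON into an in-memory Model (empty base URI is sufficient here)
            Model model = Rio.parse(new ByteArrayInputStream(json.getBytes("UTF-8")),
                                    "", RDFFormat.RDFJSON);

            // serialize the Model back to RDF/JSON
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            Rio.write(model, out, RDFFormat.RDFJSON);
            System.out.println(out.toString("UTF-8"));
        }
    }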

http://git-wip-us.apache.org/repos/asf/incubator-marmotta/blob/b8b062ca/client/marmotta-client-java/pom.xml
----------------------------------------------------------------------
diff --git a/client/marmotta-client-java/pom.xml b/client/marmotta-client-java/pom.xml
index edfc9c6..ba38e9a 100644
--- a/client/marmotta-client-java/pom.xml
+++ b/client/marmotta-client-java/pom.xml
@@ -101,6 +101,10 @@
             <artifactId>jackson-mapper-asl</artifactId>
         </dependency>
         <dependency>
+            <groupId>org.openrdf.sesame</groupId>
+            <artifactId>sesame-rio-rdfjson</artifactId>
+        </dependency>
+        <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
         </dependency>

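The new sesame-rio-rdfjson dependency is what registers RDFFormat.RDFJSON with Rio's parser and writer registries; no version is declared here, presumably because it is managed in the parent POM. A small sketch (class name is illustrative) to confirm the format is available on the classpath:

    import org.openrdf.rio.RDFFormat;
    import org.openrdf.rio.RDFParserRegistry;
    import org.openrdf.rio.RDFWriterRegistry;

    // Sketch: check Rio's registries for RDF/JSON support, which is only
    // present once sesame-rio-rdfjson is on the classpath.
    public class RdfJsonAvailabilityCheck {
        public static void main(String[] args) {
            System.out.println("RDF/JSON parser available: "
                    + RDFParserRegistry.getInstance().has(RDFFormat.RDFJSON));
            System.out.println("RDF/JSON writer available: "
                    + RDFWriterRegistry.getInstance().has(RDFFormat.RDFJSON));
        }
    }
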
http://git-wip-us.apache.org/repos/asf/incubator-marmotta/blob/b8b062ca/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/util/RDFJSONParser.java
----------------------------------------------------------------------
diff --git a/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/util/RDFJSONParser.java b/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/util/RDFJSONParser.java
index 3174044..56c0b0f 100644
--- a/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/util/RDFJSONParser.java
+++ b/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/util/RDFJSONParser.java
@@ -17,14 +17,30 @@
  */
 package org.apache.marmotta.client.util;
 
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.BNODE;
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.DATATYPE;
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.LANG;
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.TYPE;
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.URI;
+import static org.openrdf.rio.rdfjson.RDFJSONUtility.VALUE;
+
 import org.apache.marmotta.client.exception.ParseException;
 import org.apache.marmotta.client.model.meta.Metadata;
 import org.apache.marmotta.client.model.rdf.BNode;
 import org.apache.marmotta.client.model.rdf.Literal;
 import org.apache.marmotta.client.model.rdf.RDFNode;
 import org.apache.marmotta.client.model.rdf.URI;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
+import org.openrdf.model.Model;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.LinkedHashModel;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFHandlerException;
+import org.openrdf.rio.RDFParseException;
+import org.openrdf.rio.Rio;
+import org.openrdf.rio.UnsupportedRDFormatException;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -41,43 +57,35 @@ import java.util.Set;
  */
 public class RDFJSONParser {
 
-    @SuppressWarnings("unused")
-	private static final String HTTP = "http://";
-    private static final String VALUE = "value";
-    private static final String TYPE = "type";
-    private static final String TYPE_BNODE = "bnode";
-    private static final String TYPE_URI = "uri";
-    private static final String TYPE_LITERAL = "literal";
-    private static final String LANG = "lang";
-    private static final String DATATYPE = "datatype";
-
     private RDFJSONParser() {
 		// static only
 	}
     
     public static Map<String,Metadata> parseRDFJSON(InputStream data) throws ParseException {
-        ObjectMapper mapper = new ObjectMapper();
         try {
-            Map<String,Map<String,Set<Map<String,String>>>> subjects = mapper.readValue(data, new TypeReference<Map<String,Map<String,Set<Map<String,String>>>>>(){});
-
-            // convert "raw" map into a map to Metadata objects
+            Model model = Rio.parse(data, "", RDFFormat.RDFJSON);
+            
+            // convert the Sesame Model into a map from subject URIs to Metadata objects
             Map<String,Metadata> result = new HashMap<String, Metadata>();
-            for(Map.Entry<String,Map<String,Set<Map<String,String>>>> subject : subjects.entrySet()) {
-                Metadata m = new Metadata(subject.getKey());
-                result.put(subject.getKey(),m);
-
-                for(Map.Entry<String,Set<Map<String,String>>> property : subject.getValue().entrySet()) {
+            
+            for(Resource subject : model.subjects()) {
+                Metadata m = new Metadata(subject.stringValue());
+                for(org.openrdf.model.URI property : model.filter(subject, null, null).predicates()) {
                     Set<RDFNode> propValue = new HashSet<RDFNode>();
-                    for(Map<String,String> value : property.getValue()) {
+                    for(Value value : model.filter(subject, property, null).objects()) {
                         propValue.add(parseRDFJSONNode(value));
                     }
-                    m.put(property.getKey(),propValue);
+                    m.put(property.stringValue(),propValue);
                 }
+                result.put(subject.stringValue(),m);
             }
             return result;
-
         } catch (IOException e) {
             throw new ParseException("could not parse JSON data",e);
+        } catch(RDFParseException e) {
+            throw new ParseException("could not parse JSON data",e);
+        } catch(UnsupportedRDFormatException e) {
+            throw new ParseException("could not parse JSON data",e);
         }
 
     }
@@ -90,9 +98,9 @@ public class RDFJSONParser {
     public static RDFNode parseRDFJSONNode(Map<String, String> nodeDef) {
         RDFNode object;
 
-        if( nodeDef.get(TYPE).equals(TYPE_URI) ) {
+        if( nodeDef.get(TYPE).equals(URI) ) {
             object = new URI(nodeDef.get(VALUE));
-        } else if( nodeDef.get(TYPE).equals(TYPE_BNODE) ) {
+        } else if( nodeDef.get(TYPE).equals(BNODE) ) {
             object = new BNode(nodeDef.get(VALUE));
         } else {
             if( nodeDef.get(LANG) != null ) {
@@ -106,50 +114,76 @@ public class RDFJSONParser {
         return object;
     }
     
+    /**
+     * Convert a Sesame Value from a parsed RDF/JSON model into a client RDFNode object
+     * @param value the Sesame Value (URI, BNode or Literal) to convert
+     * @return the corresponding client-side RDFNode
+     */
+    public static RDFNode parseRDFJSONNode(Value value) {
+        RDFNode object;
+
+        if( value instanceof org.openrdf.model.URI ) {
+            object = new URI(value.stringValue());
+        } else if( value instanceof org.openrdf.model.BNode ) {
+            object = new BNode(value.stringValue());
+        } else {
+            org.openrdf.model.Literal literal = (org.openrdf.model.Literal)value;
+            if( literal.getLanguage() != null ) {
+                object = new Literal(literal.getLabel(), literal.getLanguage());
+            } else if( literal.getDatatype() != null) {
+                object = new Literal(literal.getLabel(),new URI(literal.getDatatype().stringValue()));
+            } else {
+                object = new Literal(literal.getLabel());
+            }
+        }
+        return object;
+    }
+    
    
     public static void serializeRDFJSON(Map<String,Metadata> data, OutputStream out) throws IOException {
-        ObjectMapper mapper = new ObjectMapper();
-
-
-        Map<String,Map<String,Set<Map<String,String>>>> subjects = new HashMap<String, Map<String, Set<Map<String, String>>>>();
-
+        ValueFactory vf = ValueFactoryImpl.getInstance();
+        Model results = new LinkedHashModel();
         
         for(Map.Entry<String,Metadata> subject : data.entrySet()) {
-            //add or get predicate map
-            Map<String,Set<Map<String,String>>> predicates = new HashMap<String,Set<Map<String,String>>>();
-            subjects.put(subject.getKey(),predicates);
-            
- 
+            Resource subjectResource = stringToResource(subject.getKey(), vf);
             for(Map.Entry<String,Set<RDFNode>> predicate : subject.getValue().entrySet()) {
-                //add or get object set
-                Set<Map<String,String>> objects = new HashSet<Map<String,String>>();
-                predicates.put(predicate.getKey(),objects);
-
-                //add objects
+                org.openrdf.model.URI predicateURI = vf.createURI(predicate.getKey());
                 for(RDFNode objectNode : predicate.getValue()) {
-                    Map<String,String> object = new HashMap<String,String>();
+                    org.openrdf.model.Value literalValue;
                     if( objectNode instanceof Literal) {
-                        object.put(TYPE,TYPE_LITERAL);
-                        object.put(VALUE,((Literal)objectNode).getContent());
                         if(((Literal) objectNode).getLanguage() != null )
-                            object.put(LANG,((Literal) objectNode).getLanguage());
-                        if(((Literal) objectNode).getType() != null)
-                            object.put(DATATYPE,((Literal) objectNode).getType().getUri());
+                            literalValue = vf.createLiteral(((Literal)objectNode).getContent(), 
+                                                ((Literal)objectNode).getLanguage());
+                        else if(((Literal) objectNode).getType() != null)
+                            literalValue = vf.createLiteral(((Literal)objectNode).getContent(), 
+                                                vf.createURI(((Literal)objectNode).getType().getUri()));
+                        else
+                            literalValue = vf.createLiteral(((Literal)objectNode).getContent());
                     } else {
                         if( objectNode instanceof URI ) {
-                            object.put(TYPE,TYPE_URI);
-                            object.put(VALUE,((URI)objectNode).getUri());
+                            literalValue = vf.createURI(((URI)objectNode).getUri());
                         } else {
-                            object.put(TYPE,TYPE_BNODE);
-                            object.put(VALUE,((BNode)objectNode).getAnonId());
+                            literalValue = vf.createBNode(((BNode)objectNode).getAnonId());
                         }
                     }
-                    objects.add(object);
+                    results.add(subjectResource, predicateURI, literalValue);
                 }
             }
                 
         }
-        mapper.writeValue(out,subjects);
-                
+        
+        try {
+            Rio.write(results, out, RDFFormat.RDFJSON);
+        } catch(RDFHandlerException e) {
+            throw new IOException(e);
+        }
+    }
+    
+    private static org.openrdf.model.Resource stringToResource(String resource, ValueFactory vf) {
+        if(resource.startsWith("_:")) {
+            return vf.createBNode(resource);
+        } else {
+            return vf.createURI(resource);
+        }
     }
 }
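
For reference, a usage sketch of the utility as it stands after this commit; the RDF/JSON input and class name are made up for illustration:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.util.Map;

    import org.apache.marmotta.client.model.meta.Metadata;
    import org.apache.marmotta.client.util.RDFJSONParser;

    // Sketch: parse RDF/JSON into the client's Metadata map and write it back
    // out, exercising both directions of the refactored RDFJSONParser.
    public class RdfJsonRoundTrip {
        public static void main(String[] args) throws Exception {
            String json = "{\"http://example.org/s\":{\"http://example.org/p\":"
                        + "[{\"type\":\"literal\",\"value\":\"hello\"}]}}";

            Map<String, Metadata> metadata = RDFJSONParser.parseRDFJSON(
                    new ByteArrayInputStream(json.getBytes("UTF-8")));

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            RDFJSONParser.serializeRDFJSON(metadata, out);
            System.out.println(out.toString("UTF-8"));
        }
    }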