Posted to commits@jena.apache.org by rv...@apache.org on 2014/04/08 15:42:21 UTC

svn commit: r1585723 [1/4] - in /jena/Experimental/hadoop-rdf: hadoop-rdf-common/src/test/java/com/ hadoop-rdf-common/src/test/java/org/ hadoop-rdf-common/src/test/java/org/apache/ hadoop-rdf-common/src/test/java/org/apache/jena/ hadoop-rdf-common/src/...

Author: rvesse
Date: Tue Apr  8 13:42:18 2014
New Revision: 1585723

URL: http://svn.apache.org/r1585723
Log:
Continue migrating to org.apache.jena package (JENA-666)

Added:
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNQuadsInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNTriplesInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NQuadsInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NTriplesInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfJsonInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfXmlInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriGInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TurtleInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNQuadsInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNTriplesInputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedNodeTupleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedQuadReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedTripleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedNodeTupleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedQuadReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedTripleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractRdfReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileNodeTupleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileQuadReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileTripleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/BlockedNQuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/BlockedNTriplesReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/NQuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/NTriplesReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/QuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/RdfJsonReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/RdfXmlReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriGReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesOrQuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesToQuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TurtleReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/WholeFileNQuadsReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/WholeFileNTriplesReader.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/RdfIOUtils.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackableInputStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedQuadsStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedRDFStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedTriplesStream.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractBatchedNodeTupleOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/NQuadsOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/NTriplesNodeOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/NTriplesOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/RdfJsonOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/RdfXmlOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriGOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TurtleOutputFormat.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedNodeTupleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedQuadWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedTripleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedNodeTupleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedQuadWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedTripleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractNodeWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileNodeTupleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileQuadWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileTripleWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/NQuadsWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/NTriplesNodeWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/NTriplesWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/RdfJsonWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/RdfXmlWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/TriGWriter.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/TurtleWriter.java
Removed:
    jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/com/
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/com/
Modified:
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/AbstractNodeTupleInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/BlockedNQuadsInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/BlockedNTriplesInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/NQuadsInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/NTriplesInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/RdfJsonInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/RdfXmlInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/TriGInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/TurtleInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/WholeFileNQuadsInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/WholeFileNTriplesInputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/AbstractCompressedNodeTupleInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/AbstractCompressedWholeFileQuadInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/AbstractCompressedWholeFileTripleInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/nquads/AbstractCompressedNQuadsInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/nquads/AbstractCompressedWholeFileNQuadsInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/ntriples/AbstractCompressedBlockedNTriplesInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/ntriples/AbstractCompressedNTriplesInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/ntriples/AbstractCompressedWholeFileNTriplesInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/rdfjson/AbstractCompressedRdfJsonInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/rdfxml/AbstractCompressedRdfXmlInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/trig/AbstractCompressedTriGInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/compressed/turtle/AbstractCompressedTurtleInputFormatTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/util/AbstractTrackableInputStreamTests.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/util/BlockInputStreamTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/input/util/TrackedInputStreamTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/NQuadsOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/NTriplesOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/RdfJsonOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/RdfXmlOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/TriGOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/test/java/com/yarcdata/urika/hadoop/rdf/io/output/TurtleOutputTest.java
    jena/Experimental/hadoop-rdf/hadoop-rdf-stats/src/main/java/com/yarcdata/urika/hadoop/rdf/stats/jobs/JobFactory.java

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,210 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.types;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
+import org.apache.jena.hadoop.rdf.types.CharacteristicWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.hp.hpl.jena.graph.Node;
+import com.hp.hpl.jena.graph.NodeFactory;
+
+/**
+ * Tests for {@link CharacteristicWritable} and
+ * {@link CharacteristicSetWritable}
+ * 
+ * @author rvesse
+ * 
+ */
+public class CharacteristicTests {
+
+    /**
+     * Checks whether a writable round trips successfully
+     * 
+     * @param cw
+     *            Characteristic writable
+     * @throws IOException
+     */
+    private void checkRoundTrip(CharacteristicWritable cw) throws IOException {
+        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+        DataOutputStream output = new DataOutputStream(outputStream);
+        cw.write(output);
+
+        ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+        DataInputStream input = new DataInputStream(inputStream);
+        CharacteristicWritable actual = CharacteristicWritable.read(input);
+        Assert.assertEquals(cw, actual);
+    }
+
+    /**
+     * Tests characteristic round tripping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_writable_01() throws IOException {
+        Node n = NodeFactory.createURI("http://example.org");
+        CharacteristicWritable expected = new CharacteristicWritable(n);
+        Assert.assertEquals(1, expected.getCount().get());
+
+        this.checkRoundTrip(expected);
+    }
+
+    /**
+     * Tests characteristic properties
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_writable_02() throws IOException {
+        Node n = NodeFactory.createURI("http://example.org");
+        CharacteristicWritable cw1 = new CharacteristicWritable(n);
+        CharacteristicWritable cw2 = new CharacteristicWritable(n, 100);
+        this.checkRoundTrip(cw1);
+        this.checkRoundTrip(cw2);
+
+        // Should still be equal since equality is only on the node not the
+        // count
+        Assert.assertEquals(cw1, cw2);
+    }
+
+    /**
+     * Tests characteristic properties
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_writable_03() throws IOException {
+        CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
+        CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
+        this.checkRoundTrip(cw1);
+        this.checkRoundTrip(cw2);
+
+        // Should not be equal as different nodes
+        Assert.assertNotEquals(cw1, cw2);
+    }
+
+    /**
+     * Checks that a writable round trips
+     * 
+     * @param set
+     *            Characteristic set
+     * @throws IOException
+     */
+    private void checkRoundTrip(CharacteristicSetWritable set) throws IOException {
+        // Test round trip
+        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+        DataOutputStream output = new DataOutputStream(outputStream);
+        set.write(output);
+
+        ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+        DataInputStream input = new DataInputStream(inputStream);
+        CharacteristicSetWritable actual = CharacteristicSetWritable.read(input);
+        Assert.assertEquals(set, actual);
+    }
+
+    /**
+     * Checks a characteristic set
+     * 
+     * @param set
+     *            Set
+     * @param expectedItems
+     *            Expected number of characteristics
+     * @param expectedCounts
+     *            Expected counts for characteristics
+     */
+    protected final void checkCharacteristicSet(CharacteristicSetWritable set, int expectedItems, long[] expectedCounts) {
+        Assert.assertEquals(expectedItems, set.size());
+        Assert.assertEquals(expectedItems, expectedCounts.length);
+        Iterator<CharacteristicWritable> iter = set.getCharacteristics();
+        int i = 0;
+        while (iter.hasNext()) {
+            CharacteristicWritable cw = iter.next();
+            Assert.assertEquals(expectedCounts[i], cw.getCount().get());
+            i++;
+        }
+    }
+
+    /**
+     * Tests characteristic sets
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_writable_01() throws IOException {
+        CharacteristicSetWritable set = new CharacteristicSetWritable();
+
+        // Add some characteristics
+        CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
+        CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
+        set.add(cw1);
+        set.add(cw2);
+        this.checkCharacteristicSet(set, 2, new long[] { 1, 1 });
+        this.checkRoundTrip(set);
+    }
+
+    /**
+     * Tests characteristic sets
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_writable_02() throws IOException {
+        CharacteristicSetWritable set = new CharacteristicSetWritable();
+
+        // Add some characteristics
+        CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
+        CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"), 2);
+        set.add(cw1);
+        set.add(cw2);
+        this.checkCharacteristicSet(set, 1, new long[] { 3 });
+        this.checkRoundTrip(set);
+    }
+    
+    /**
+     * Tests characteristic sets
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_writable_03() throws IOException {
+        CharacteristicSetWritable set1 = new CharacteristicSetWritable();
+        CharacteristicSetWritable set2 = new CharacteristicSetWritable();
+
+        // Add some characteristics
+        CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
+        CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
+        set1.add(cw1);
+        set2.add(cw2);
+        this.checkCharacteristicSet(set1, 1, new long[] { 1 });
+        this.checkCharacteristicSet(set2, 1, new long[] { 1 });
+        this.checkRoundTrip(set1);
+        this.checkRoundTrip(set2);
+        
+        Assert.assertNotEquals(set1, set2);
+    }
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,364 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.types;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.jena.atlas.lib.Tuple;
+import org.apache.jena.hadoop.rdf.types.NodeTupleWritable;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.Node;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Tests for the various RDF types defined by the
+ * {@link org.apache.jena.hadoop.rdf.types} package
+ * 
+ * @author rvesse
+ * 
+ */
+public class RdfTypesTest {
+    
+    private static final Logger LOG = LoggerFactory.getLogger(RdfTypesTest.class);
+
+    private ByteArrayOutputStream outputStream;
+    private ByteArrayInputStream inputStream;
+
+    /**
+     * Prepare for output
+     * 
+     * @return Data output
+     */
+    private DataOutput prepareOutput() {
+        this.outputStream = new ByteArrayOutputStream();
+        return new DataOutputStream(this.outputStream);
+    }
+
+    /**
+     * Prepare for input from the previously written output
+     * 
+     * @return Data Input
+     */
+    private DataInput prepareInput() {
+        this.inputStream = new ByteArrayInputStream(this.outputStream.toByteArray());
+        return new DataInputStream(this.inputStream);
+    }
+
+    /**
+     * Prepare for input from the given data
+     * 
+     * @param data
+     *            Data
+     * @return Data Input
+     */
+    @SuppressWarnings("unused")
+    private DataInput prepareInput(byte[] data) {
+        this.inputStream = new ByteArrayInputStream(data);
+        return new DataInputStream(this.inputStream);
+    }
+
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    private <T extends WritableComparable> void testWriteRead(T writable, T expected) throws IOException, InstantiationException,
+            IllegalAccessException, ClassNotFoundException {
+        // Write out data
+        DataOutput output = this.prepareOutput();
+        writable.write(output);
+
+        // Read back in data
+        DataInput input = this.prepareInput();
+        T actual = (T) Class.forName(writable.getClass().getName()).newInstance();
+        actual.readFields(input);
+        
+        LOG.info("Original = " + writable.toString());
+        LOG.info("Round Tripped = " + actual.toString());
+
+        // Check equivalent
+        Assert.assertEquals(0, expected.compareTo(actual));
+    }
+
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_uri_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createURI("http://example.org");
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_uri_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createURI("http://user:password@example.org/some/path?key=value#id");
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("simple");
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("language", "en", null);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_03() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("string", XSDDatatype.XSDstring);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_04() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("1234", XSDDatatype.XSDinteger);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_05() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("123.4", XSDDatatype.XSDdecimal);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_06() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("12.3e4", XSDDatatype.XSDdouble);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_literal_07() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createLiteral("true", XSDDatatype.XSDboolean);
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_bnode_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createAnon();
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+    }
+    
+    /**
+     * Basic node writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void node_writable_bnode_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Node n = NodeFactory.createAnon();
+        NodeWritable nw = new NodeWritable(n);
+        testWriteRead(nw, nw);
+        NodeWritable nw2 = new NodeWritable(n);
+        testWriteRead(nw2, nw2);
+        
+        Assert.assertEquals(0, nw.compareTo(nw2));
+    }
+    
+    /**
+     * Basic triple writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void triple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Triple t = new Triple(NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral("value"));
+        TripleWritable tw = new TripleWritable(t);
+        testWriteRead(tw, tw);
+    }
+    
+    /**
+     * Basic triple writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void triple_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Triple t = new Triple(NodeFactory.createAnon(), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral("value"));
+        TripleWritable tw = new TripleWritable(t);
+        testWriteRead(tw, tw);
+    }
+    
+    /**
+     * Basic quad writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void quad_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral("value"));
+        QuadWritable qw = new QuadWritable(q);
+        testWriteRead(qw, qw);
+    }
+    
+    /**
+     * Basic quad writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void quad_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createAnon(), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral("value"));
+        QuadWritable qw = new QuadWritable(q);
+        testWriteRead(qw, qw);
+    }
+    
+    /**
+     * Basic tuple writable round tripping test
+     * 
+     * @throws IOException
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ClassNotFoundException
+     */
+    @Test
+    public void tuple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
+        Tuple<Node> t = Tuple.createTuple(NodeFactory.createURI("http://one"), NodeFactory.createURI("http://two"),
+                NodeFactory.createLiteral("value"), NodeFactory.createLiteral("foo"), NodeFactory.createURI("http://three"));
+        NodeTupleWritable tw = new NodeTupleWritable(t);
+        testWriteRead(tw, tw);
+    }
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,49 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io;
+
+/**
+ * Hadoop IO related constants
+ * 
+ * @author rvesse
+ * 
+ */
+public class HadoopIOConstants {
+
+    /**
+     * Private constructor prevents instantiation
+     */
+    private HadoopIOConstants() {
+    }
+
+    /**
+     * Map Reduce configuration setting for max line length
+     */
+    public static final String MAX_LINE_LENGTH = "mapreduce.input.linerecordreader.line.maxlength";
+
+    /**
+     * Run ID
+     */
+    public static final String RUN_ID = "runId";
+    
+    /**
+     * Compression codecs to use
+     */
+    public static final String IO_COMPRESSION_CODECS = "io.compression.codecs";
+}
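
These keys mirror standard Hadoop configuration property names, so they can be applied directly to a job Configuration. A minimal sketch of doing so (the class name and the chosen values are illustrative assumptions, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.jena.hadoop.rdf.io.HadoopIOConstants;

    public class HadoopIOConfigSketch {
        public static Configuration configure() {
            Configuration config = new Configuration(true);
            // Cap the length of any single line a line based record reader will buffer
            config.setInt(HadoopIOConstants.MAX_LINE_LENGTH, 64 * 1024);
            // Comma separated list of compression codec classes to make available
            config.set(HadoopIOConstants.IO_COMPRESSION_CODECS,
                    "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec");
            return config;
        }
    }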

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,56 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io;
+
+import java.io.IOException;
+
+/**
+ * RDF IO related constants
+ * 
+ * @author rvesse
+ * 
+ */
+public class RdfIOConstants {
+
+    /**
+     * Private constructor prevents instantiation
+     */
+    private RdfIOConstants() {
+    }
+
+    /**
+     * Configuration key used to set whether bad tuples are ignored. Ignoring
+     * them is the default behaviour; when this is explicitly set to
+     * {@code false} bad tuples will result in an {@link IOException} being
+     * thrown by the relevant record readers.
+     */
+    public static final String INPUT_IGNORE_BAD_TUPLES = "rdf.io.input.ignore-bad-tuples";
+
+    /**
+     * Configuration key used to set the batch size used for RDF output formats
+     * that take a batched writing approach. Default value is given by the
+     * constant {@link #DEFAULT_OUTPUT_BATCH_SIZE}.
+     */
+    public static final String OUTPUT_BATCH_SIZE = "rdf.io.output.batch-size";
+
+    /**
+     * Default batch size for batched output formats
+     */
+    public static final long DEFAULT_OUTPUT_BATCH_SIZE = 10000;
+}
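
A minimal sketch of how these keys might be set on a job configuration; the class name and the chosen values are illustrative assumptions, not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.jena.hadoop.rdf.io.RdfIOConstants;

    public class RdfIOConfigSketch {
        public static Configuration configure() {
            Configuration config = new Configuration(true);
            // Fail fast on malformed tuples rather than silently skipping them
            config.setBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, false);
            // Use a smaller batch size than the 10000 tuple default for batched output formats
            config.setLong(RdfIOConstants.OUTPUT_BATCH_SIZE, 1000);
            return config;
        }
    }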

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,58 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
+
+/**
+ * Abstract line based input format that reuses the machinery from
+ * {@link NLineInputFormat} to calculate the splits
+ * 
+ * @author rvesse
+ * 
+ * @param <TKey>
+ *            Key type
+ * @param <TValue>
+ *            Value type
+ */
+public abstract class AbstractNLineFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
+
+    /**
+     * Logically splits the set of input files for the job, treating N lines
+     * of the input as one split.
+     * 
+     * @see FileInputFormat#getSplits(JobContext)
+     */
+    public final List<InputSplit> getSplits(JobContext job) throws IOException {
+        List<InputSplit> splits = new ArrayList<InputSplit>();
+        int numLinesPerSplit = NLineInputFormat.getNumLinesPerSplit(job);
+        for (FileStatus status : listStatus(job)) {
+            splits.addAll(NLineInputFormat.getSplitsForFile(status, job.getConfiguration(), numLinesPerSplit));
+        }
+        return splits;
+    }
+}
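
Because the splits are computed via NLineInputFormat.getSplitsForFile(), the number of lines per split is controlled through the standard NLineInputFormat setting. A sketch of a driver using one of the concrete subclasses added by this commit; the job name and split size are illustrative assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.NTriplesInputFormat;

    public class LineSplitSketch {
        public static Job createJob(Configuration config) throws Exception {
            Job job = Job.getInstance(config, "NTriples line based input");
            // Each input split covers 10000 lines of an input file
            NLineInputFormat.setNumLinesPerSplit(job, 10000);
            job.setInputFormatClass(NTriplesInputFormat.class);
            return job;
        }
    }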

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,42 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+
+/**
+ * Abstract implementation of a whole file input format where each file is a
+ * single split
+ * 
+ * @author rvesse
+ * 
+ * @param <TKey>
+ *            Key type
+ * @param <TValue>
+ *            Value type
+ */
+public abstract class AbstractWholeFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
+
+    @Override
+    protected final boolean isSplitable(JobContext context, Path filename) {
+        return false;
+    }
+}
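
A minimal sketch of what a concrete subclass looks like, following the pattern of the other input formats in this commit; the class name is hypothetical and the assumption that WholeFileNQuadsReader has a no-argument constructor is based on the readers used elsewhere in this commit:

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.readers.WholeFileNQuadsReader;
    import org.apache.jena.hadoop.rdf.types.QuadWritable;

    public class ExampleWholeFileQuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {

        @Override
        public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
                throws IOException, InterruptedException {
            // Each file goes to a single reader since isSplitable() returns false
            return new WholeFileNQuadsReader();
        }
    }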

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNQuadsInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNQuadsInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNQuadsInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNQuadsInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,52 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.BlockedNQuadsReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * NQuads input format where files are processed as blocks of lines rather
+ * than in a line based manner as with the {@link NQuadsInputFormat} or as
+ * whole files with the {@link WholeFileNQuadsInputFormat}
+ * <p>
+ * This provides a compromise between the higher parser setup cost of creating
+ * more parsers and the benefit of being able to split input files over
+ * multiple mappers.
+ * </p>
+ * 
+ * @author rvesse
+ * 
+ */
+public class BlockedNQuadsInputFormat extends AbstractNLineFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new BlockedNQuadsReader();
+    }
+
+}
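
A sketch of wiring the blocked format into a job driver to get the split-friendly behaviour described above; the job name and input path are illustrative assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.BlockedNQuadsInputFormat;

    public class BlockedInputSketch {
        public static Job createJob(Configuration config) throws Exception {
            Job job = Job.getInstance(config, "Blocked NQuads input");
            // Blocks of lines are parsed together while files remain splittable across mappers
            job.setInputFormatClass(BlockedNQuadsInputFormat.class);
            FileInputFormat.addInputPath(job, new Path("/data/example.nq"));
            return job;
        }
    }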

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNTriplesInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNTriplesInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNTriplesInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/BlockedNTriplesInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,52 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.BlockedNTriplesReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * NTriples input format where files are processed as blocks of lines rather
+ * than in a line based manner as with the {@link NTriplesInputFormat} or as
+ * whole files with the {@link WholeFileNTriplesInputFormat}
+ * <p>
+ * This provides a compromise between the higher parser setup cost of creating
+ * more parsers and the benefit of being able to split input files over
+ * multiple mappers.
+ * </p>
+ * 
+ * @author rvesse
+ * 
+ */
+public class BlockedNTriplesInputFormat extends AbstractNLineFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new BlockedNTriplesReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NQuadsInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NQuadsInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NQuadsInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NQuadsInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.NQuadsReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * NQuads input format
+ * 
+ * @author rvesse
+ * 
+ */
+public class NQuadsInputFormat extends AbstractNLineFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1)
+            throws IOException, InterruptedException {
+        return new NQuadsReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NTriplesInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NTriplesInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NTriplesInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/NTriplesInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.NTriplesReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * NTriples input format
+ * 
+ * @author rvesse
+ * 
+ */
+public class NTriplesInputFormat extends AbstractNLineFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new NTriplesReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,46 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.QuadsReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * RDF input format that can handle any RDF quads format that ARQ supports,
+ * selecting the format to use for each file based upon the file extension
+ * 
+ * @author rvesse
+ * 
+ */
+public class QuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new QuadsReader();
+    }
+
+}
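
A minimal sketch, not part of this commit, of a driver and mapper that count quads read through this format; the input directory may mix quad serialisations (for example .nq and .trig files) since the parser is chosen per file from its extension. The paths and class names introduced here are illustrative.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.jena.hadoop.rdf.io.input.QuadsInputFormat;
    import org.apache.jena.hadoop.rdf.types.QuadWritable;

    public class QuadCountDriver {

        // Counts every quad regardless of serialisation; the key/value types match
        // the <LongWritable, QuadWritable> pairs produced by QuadsInputFormat
        public static class QuadCountMapper extends Mapper<LongWritable, QuadWritable, Text, LongWritable> {
            private static final Text QUADS = new Text("quads");
            private static final LongWritable ONE = new LongWritable(1);

            @Override
            protected void map(LongWritable key, QuadWritable value, Context context)
                    throws IOException, InterruptedException {
                context.write(QUADS, ONE);
            }
        }

        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Count quads");
            job.setJarByClass(QuadCountDriver.class);
            // The input directory may mix file extensions; the format picks a parser per file
            job.setInputFormatClass(QuadsInputFormat.class);
            job.setMapperClass(QuadCountMapper.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);
            FileInputFormat.addInputPath(job, new Path("/data/quads"));       // illustrative path
            FileOutputFormat.setOutputPath(job, new Path("/data/quads-out")); // illustrative path
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }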

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfJsonInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfJsonInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfJsonInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfJsonInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.RdfJsonReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * RDF/JSON input format
+ * 
+ * @author rvesse
+ * 
+ */
+public class RdfJsonInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new RdfJsonReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfXmlInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfXmlInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfXmlInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/RdfXmlInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.RdfXmlReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * RDF/XML input format
+ * 
+ * @author rvesse
+ * 
+ */
+public class RdfXmlInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new RdfXmlReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriGInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriGInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriGInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriGInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.TriGReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Input format for TriG
+ * 
+ * @author rvesse
+ * 
+ */
+public class TriGInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new TriGReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,46 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.TriplesReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * RDF input format that can handle any RDF triples format that ARQ supports,
+ * selecting the format to use for each file based upon the file extension
+ * 
+ * @author rvesse
+ * 
+ */
+public class TriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new TriplesReader();
+    }
+
+}
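
A minimal sketch, not part of this commit, of a mapper that could sit behind this format to count triples per predicate. Assumptions: TripleWritable exposes the wrapped triple via a get() accessor, and the Jena 2.x Triple class lives at com.hp.hpl.jena.graph.Triple; neither is shown in this changeset. It would be wired in with job.setInputFormatClass(TriplesInputFormat.class) and job.setMapperClass(PredicateCountMapper.class).

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.jena.hadoop.rdf.types.TripleWritable;

    import com.hp.hpl.jena.graph.Triple;

    // Key/value types match the <LongWritable, TripleWritable> pairs produced by
    // TriplesInputFormat (and the other triple input formats in this changeset)
    public class PredicateCountMapper extends Mapper<LongWritable, TripleWritable, Text, LongWritable> {
        private static final LongWritable ONE = new LongWritable(1);

        @Override
        protected void map(LongWritable key, TripleWritable value, Context context)
                throws IOException, InterruptedException {
            Triple t = value.get(); // assumed accessor for the wrapped Jena Triple
            context.write(new Text(t.getPredicate().toString()), ONE);
        }
    }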

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,47 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.TriplesOrQuadsReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * RDF input format that can handle any RDF triples or quads format that ARQ
+ * supports, selecting the format to use for each file based upon the file
+ * extension. Triples are converted into quads in the default graph.
+ * 
+ * @author rvesse
+ * 
+ */
+public class TriplesOrQuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new TriplesOrQuadsReader();
+    }
+
+}
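
A minimal sketch, not part of this commit, of a mapper that separates quads carrying an explicit graph name from triples this format has promoted into the default graph. Assumptions: QuadWritable exposes the wrapped quad via a get() accessor, and the Jena 2.x Quad class (com.hp.hpl.jena.sparql.core.Quad) provides isDefaultGraph() and getGraph(); neither is shown in this changeset.

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.jena.hadoop.rdf.types.QuadWritable;

    import com.hp.hpl.jena.sparql.core.Quad;

    public class GraphNameMapper extends Mapper<LongWritable, QuadWritable, Text, LongWritable> {
        private static final LongWritable ONE = new LongWritable(1);

        @Override
        protected void map(LongWritable key, QuadWritable value, Context context)
                throws IOException, InterruptedException {
            Quad q = value.get(); // assumed accessor for the wrapped Jena Quad
            // Triples read from triple formats show up here as default graph quads
            String graph = q.isDefaultGraph() ? "<default>" : q.getGraph().toString();
            context.write(new Text(graph), ONE);
        }
    }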

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TurtleInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TurtleInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TurtleInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TurtleInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,45 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.TurtleReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * Turtle input format
+ * 
+ * @author rvesse
+ * 
+ */
+public class TurtleInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new TurtleReader();
+    }
+
+}

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNQuadsInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNQuadsInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNQuadsInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNQuadsInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,50 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.WholeFileNQuadsReader;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * NQuads input format where files are processed as complete files rather than
+ * in a line-based manner as with the {@link NQuadsInputFormat}
+ * <p>
+ * This has the advantage of less parser setup overhead but the disadvantage
+ * that the input cannot be split over multiple mappers.
+ * </p>
+ * 
+ * @author rvesse
+ * 
+ */
+public class WholeFileNQuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
+
+    @Override
+    public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new WholeFileNQuadsReader();
+    }
+
+}
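
A minimal sketch, not part of this commit, of a driver helper that picks between the two NQuads formats according to that trade-off; the same reasoning applies to the whole-file NTriples format added below.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.jena.hadoop.rdf.io.input.NQuadsInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.WholeFileNQuadsInputFormat;

    public class NQuadsFormatChooser {
        // Whole-file processing pays the parser setup once per file but yields at
        // most one mapper per file; line-based input keeps large files splittable
        // at the cost of more parser setup
        public static Job newJob(boolean manySmallFiles) throws Exception {
            Job job = Job.getInstance(new Configuration(), "NQuads job");
            if (manySmallFiles) {
                job.setInputFormatClass(WholeFileNQuadsInputFormat.class);
            } else {
                job.setInputFormatClass(NQuadsInputFormat.class);
            }
            return job;
        }
    }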

Added: jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNTriplesInputFormat.java
URL: http://svn.apache.org/viewvc/jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNTriplesInputFormat.java?rev=1585723&view=auto
==============================================================================
--- jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNTriplesInputFormat.java (added)
+++ jena/Experimental/hadoop-rdf/hadoop-rdf-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/WholeFileNTriplesInputFormat.java Tue Apr  8 13:42:18 2014
@@ -0,0 +1,50 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package org.apache.jena.hadoop.rdf.io.input;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.jena.hadoop.rdf.io.input.readers.WholeFileNTriplesReader;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * NTriples input format where files are processed as complete files rather than
+ * in a line-based manner as with the {@link NTriplesInputFormat}
+ * <p>
+ * This has the advantage of less parser setup overhead but the disadvantage
+ * that the input cannot be split over multiple mappers.
+ * </p>
+ * 
+ * @author rvesse
+ * 
+ */
+public class WholeFileNTriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
+
+    @Override
+    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+        return new WholeFileNTriplesReader();
+    }
+
+}
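
A minimal sketch, not part of this commit, that makes the splitting limitation concrete: since whole files are never split, the number of splits (and therefore map tasks) is capped at the number of input files under the path. Job is used here as the JobContext that getSplits() expects (Hadoop 2.x mapreduce API), and the path is illustrative.

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.WholeFileNTriplesInputFormat;

    public class SplitCountCheck {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Split count check");
            FileInputFormat.addInputPath(job, new Path("/data/ntriples")); // illustrative path
            // One split per file is expected because whole-file inputs are not splittable
            List<InputSplit> splits = new WholeFileNTriplesInputFormat().getSplits(job);
            System.out.println("Map tasks for whole-file input: " + splits.size());
        }
    }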