You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jena.apache.org by an...@apache.org on 2021/02/08 17:29:07 UTC
[jena] branch master updated: JENA-2040: Retire jena-elephas
This is an automated email from the ASF dual-hosted git repository.
andy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/jena.git
The following commit(s) were added to refs/heads/master by this push:
new 3cef6c8 JENA-2040: Retire jena-elephas
new 5af113a Merge pull request #915 from afs/elephas
3cef6c8 is described below
commit 3cef6c8b462e3cad3bef28b0b9706a44c18304de
Author: Andy Seaborne <an...@apache.org>
AuthorDate: Mon Feb 8 14:46:21 2021 +0000
JENA-2040: Retire jena-elephas
---
jena-elephas/README.md | 13 +
jena-elephas/jena-elephas-common/pom.xml | 80 --
.../rdf/types/AbstractNodeTupleWritable.java | 194 -----
.../rdf/types/CharacteristicSetWritable.java | 297 --------
.../hadoop/rdf/types/CharacteristicWritable.java | 159 ----
.../jena/hadoop/rdf/types/NodeTupleWritable.java | 83 ---
.../apache/jena/hadoop/rdf/types/NodeWritable.java | 188 -----
.../apache/jena/hadoop/rdf/types/QuadWritable.java | 135 ----
.../jena/hadoop/rdf/types/TripleWritable.java | 137 ----
.../types/comparators/SimpleBinaryComparator.java | 34 -
.../rdf/types/converters/ThriftConverter.java | 147 ----
.../hadoop/rdf/io/types/CharacteristicTests.java | 209 ------
.../jena/hadoop/rdf/io/types/RdfTypesTest.java | 400 ----------
.../src/test/resources/log4j.properties | 18 -
jena-elephas/jena-elephas-io/pom.xml | 107 ---
.../jena/hadoop/rdf/io/HadoopIOConstants.java | 49 --
.../apache/jena/hadoop/rdf/io/RdfIOConstants.java | 81 --
.../rdf/io/input/AbstractNLineFileInputFormat.java | 71 --
.../rdf/io/input/AbstractWholeFileInputFormat.java | 42 --
.../jena/hadoop/rdf/io/input/QuadsInputFormat.java | 43 --
.../hadoop/rdf/io/input/TriplesInputFormat.java | 39 -
.../rdf/io/input/TriplesOrQuadsInputFormat.java | 44 --
.../rdf/io/input/jsonld/JsonLDQuadInputFormat.java | 36 -
.../io/input/jsonld/JsonLDTripleInputFormat.java | 36 -
.../io/input/nquads/BlockedNQuadsInputFormat.java | 50 --
.../rdf/io/input/nquads/NQuadsInputFormat.java | 43 --
.../input/nquads/WholeFileNQuadsInputFormat.java | 48 --
.../input/ntriples/BlockedNTriplesInputFormat.java | 50 --
.../rdf/io/input/ntriples/NTriplesInputFormat.java | 43 --
.../ntriples/WholeFileNTriplesInputFormat.java | 48 --
.../rdf/io/input/rdfjson/RdfJsonInputFormat.java | 43 --
.../rdf/io/input/rdfxml/RdfXmlInputFormat.java | 43 --
.../readers/AbstractBlockBasedNodeTupleReader.java | 338 ---------
.../readers/AbstractBlockBasedQuadReader.java | 50 --
.../readers/AbstractBlockBasedTripleReader.java | 50 --
.../readers/AbstractLineBasedNodeTupleReader.java | 284 -------
.../input/readers/AbstractLineBasedQuadReader.java | 48 --
.../readers/AbstractLineBasedTripleReader.java | 49 --
.../rdf/io/input/readers/AbstractRdfReader.java | 108 ---
.../readers/AbstractWholeFileNodeTupleReader.java | 326 --------
.../input/readers/AbstractWholeFileQuadReader.java | 50 --
.../readers/AbstractWholeFileTripleReader.java | 50 --
.../hadoop/rdf/io/input/readers/QuadsReader.java | 48 --
.../rdf/io/input/readers/TriplesOrQuadsReader.java | 70 --
.../hadoop/rdf/io/input/readers/TriplesReader.java | 48 --
.../rdf/io/input/readers/TriplesToQuadsReader.java | 101 ---
.../io/input/readers/jsonld/JsonLDQuadReader.java | 32 -
.../input/readers/jsonld/JsonLDTripleReader.java | 30 -
.../input/readers/nquads/BlockedNQuadsReader.java | 45 --
.../rdf/io/input/readers/nquads/NQuadsReader.java | 40 -
.../readers/nquads/WholeFileNQuadsReader.java | 42 --
.../readers/ntriples/BlockedNTriplesReader.java | 45 --
.../io/input/readers/ntriples/NTriplesReader.java | 41 --
.../readers/ntriples/WholeFileNTriplesReader.java | 42 --
.../io/input/readers/rdfjson/RdfJsonReader.java | 37 -
.../rdf/io/input/readers/rdfxml/RdfXmlReader.java | 37 -
.../io/input/readers/thrift/ThriftQuadReader.java | 32 -
.../input/readers/thrift/ThriftTripleReader.java | 30 -
.../rdf/io/input/readers/trig/TriGReader.java | 37 -
.../rdf/io/input/readers/trix/TriXReader.java | 37 -
.../rdf/io/input/readers/turtle/TurtleReader.java | 37 -
.../rdf/io/input/thrift/ThriftQuadInputFormat.java | 36 -
.../io/input/thrift/ThriftTripleInputFormat.java | 36 -
.../hadoop/rdf/io/input/trig/TriGInputFormat.java | 43 --
.../hadoop/rdf/io/input/trix/TriXInputFormat.java | 39 -
.../rdf/io/input/turtle/TurtleInputFormat.java | 43 --
.../hadoop/rdf/io/input/util/BlockInputStream.java | 94 ---
.../jena/hadoop/rdf/io/input/util/RdfIOUtils.java | 114 ---
.../rdf/io/input/util/TrackableInputStream.java | 38 -
.../rdf/io/input/util/TrackedInputStream.java | 124 ----
.../rdf/io/input/util/TrackedPipedQuadsStream.java | 54 --
.../rdf/io/input/util/TrackedPipedRDFStream.java | 64 --
.../io/input/util/TrackedPipedTriplesStream.java | 55 --
.../AbstractBatchedNodeTupleOutputFormat.java | 55 --
.../rdf/io/output/AbstractNodeOutputFormat.java | 94 ---
.../io/output/AbstractNodeTupleOutputFormat.java | 109 ---
.../AbstractStreamRdfNodeTupleOutputFormat.java | 73 --
.../hadoop/rdf/io/output/QuadsOutputFormat.java | 63 --
.../rdf/io/output/TriplesOrQuadsOutputFormat.java | 73 --
.../hadoop/rdf/io/output/TriplesOutputFormat.java | 60 --
.../io/output/jsonld/JsonLDQuadOutputFormat.java | 43 --
.../io/output/jsonld/JsonLDTripleOutputFormat.java | 43 --
.../rdf/io/output/nquads/NQuadsOutputFormat.java | 51 --
.../output/ntriples/NTriplesNodeOutputFormat.java | 45 --
.../io/output/ntriples/NTriplesOutputFormat.java | 51 --
.../rdf/io/output/rdfjson/RdfJsonOutputFormat.java | 51 --
.../rdf/io/output/rdfxml/RdfXmlOutputFormat.java | 51 --
.../io/output/thrift/ThriftQuadOutputFormat.java | 51 --
.../io/output/thrift/ThriftTripleOutputFormat.java | 51 --
.../io/output/trig/BatchedTriGOutputFormat.java | 53 --
.../rdf/io/output/trig/TriGOutputFormat.java | 57 --
.../rdf/io/output/trix/TriXOutputFormat.java | 57 --
.../output/turtle/BatchedTurtleOutputFormat.java | 49 --
.../rdf/io/output/turtle/TurtleOutputFormat.java | 55 --
.../writers/AbstractBatchedNodeTupleWriter.java | 113 ---
.../output/writers/AbstractBatchedQuadWriter.java | 79 --
.../writers/AbstractBatchedTripleWriter.java | 67 --
.../writers/AbstractLineBasedNodeTupleWriter.java | 150 ----
.../writers/AbstractLineBasedQuadWriter.java | 70 --
.../writers/AbstractLineBasedTripleWriter.java | 67 --
.../rdf/io/output/writers/AbstractNodeWriter.java | 192 -----
.../writers/AbstractStreamRdfNodeTupleWriter.java | 69 --
.../writers/AbstractWholeFileNodeTupleWriter.java | 96 ---
.../writers/AbstractWholeFileQuadWriter.java | 65 --
.../writers/AbstractWholeFileTripleWriter.java | 64 --
.../io/output/writers/QuadsToTriplesWriter.java | 59 --
.../rdf/io/output/writers/StreamRdfQuadWriter.java | 44 --
.../io/output/writers/StreamRdfTripleWriter.java | 43 --
.../io/output/writers/jsonld/JsonLDQuadWriter.java | 38 -
.../output/writers/jsonld/JsonLDTripleWriter.java | 38 -
.../rdf/io/output/writers/nquads/NQuadsWriter.java | 57 --
.../writers/ntriples/NTriplesNodeWriter.java | 59 --
.../io/output/writers/ntriples/NTriplesWriter.java | 58 --
.../io/output/writers/rdfjson/RdfJsonWriter.java | 51 --
.../rdf/io/output/writers/rdfxml/RdfXmlWriter.java | 51 --
.../io/output/writers/thrift/ThriftQuadWriter.java | 38 -
.../output/writers/thrift/ThriftTripleWriter.java | 38 -
.../io/output/writers/trig/BatchedTriGWriter.java | 52 --
.../output/writers/turtle/BatchedTurtleWriter.java | 54 --
.../rdf/io/registry/HadoopRdfIORegistry.java | 310 --------
.../jena/hadoop/rdf/io/registry/ReaderFactory.java | 83 ---
.../jena/hadoop/rdf/io/registry/WriterFactory.java | 96 ---
.../readers/AbstractQuadsOnlyReaderFactory.java | 83 ---
.../io/registry/readers/AbstractReaderFactory.java | 80 --
.../readers/AbstractTriplesOnlyReaderFactory.java | 83 ---
.../io/registry/readers/JsonLDReaderFactory.java | 47 --
.../io/registry/readers/NQuadsReaderFactory.java | 40 -
.../io/registry/readers/NTriplesReaderFactory.java | 37 -
.../io/registry/readers/RdfJsonReaderFactory.java | 39 -
.../io/registry/readers/RdfXmlReaderFactory.java | 38 -
.../io/registry/readers/ThriftReaderFactory.java | 47 --
.../rdf/io/registry/readers/TriGReaderFactory.java | 40 -
.../rdf/io/registry/readers/TriXReaderFactory.java | 39 -
.../io/registry/readers/TurtleReaderFactory.java | 38 -
.../writers/AbstractQuadsOnlyWriterFactory.java | 86 ---
.../writers/AbstractTriplesOnlyWriterFactory.java | 85 ---
.../io/registry/writers/AbstractWriterFactory.java | 82 ---
.../io/registry/writers/JsonLDWriterFactory.java | 49 --
.../io/registry/writers/NQuadsWriterFactory.java | 42 --
.../io/registry/writers/NTriplesWriterFactory.java | 42 --
.../io/registry/writers/RdfJsonWriterFactory.java | 41 --
.../io/registry/writers/RdfXmlWriterFactory.java | 42 --
.../io/registry/writers/ThriftWriterFactory.java | 54 --
.../rdf/io/registry/writers/TriGWriterFactory.java | 43 --
.../rdf/io/registry/writers/TriXWriterFactory.java | 45 --
.../io/registry/writers/TurtleWriterFactory.java | 43 --
...pache.jena.hadoop.rdf.io.registry.ReaderFactory | 10 -
...pache.jena.hadoop.rdf.io.registry.WriterFactory | 10 -
.../hadoop/rdf/io/RdfTriplesInputTestMapper.java | 44 --
.../input/AbstractBlockedQuadInputFormatTests.java | 33 -
.../AbstractBlockedTripleInputFormatTests.java | 33 -
.../input/AbstractNodeTupleInputFormatTests.java | 611 ---------------
.../io/input/AbstractQuadsInputFormatTests.java | 69 --
.../io/input/AbstractTriplesInputFormatTests.java | 71 --
.../AbstractWholeFileQuadInputFormatTests.java | 114 ---
.../AbstractWholeFileTripleInputFormatTests.java | 107 ---
.../io/input/bnodes/AbstractBlankNodeTests.java | 630 ----------------
.../input/bnodes/AbstractTripleBlankNodeTests.java | 64 --
.../io/input/bnodes/JsonLdTripleBlankNodeTest.java | 63 --
.../rdf/io/input/bnodes/NTriplesBlankNodeTest.java | 58 --
.../rdf/io/input/bnodes/RdfJsonBlankNodeTest.java | 58 --
.../io/input/bnodes/RdfThriftBlankNodeTest.java | 68 --
.../rdf/io/input/bnodes/RdfXmlBlankNodeTest.java | 62 --
.../rdf/io/input/bnodes/TurtleBlankNodeTest.java | 58 --
...bstractCompressedNodeTupleInputFormatTests.java | 74 --
.../AbstractCompressedQuadsInputFormatTests.java | 70 --
.../AbstractCompressedTriplesInputFormatTests.java | 70 --
...actCompressedWholeFileQuadInputFormatTests.java | 149 ----
...tCompressedWholeFileTripleInputFormatTests.java | 143 ----
...stractCompressedJsonLDQuadInputFormatTests.java | 90 ---
...ractCompressedJsonLDTripleInputFormatTests.java | 89 ---
.../jsonld/BZippedJsonLDQuadInputTest.java | 34 -
.../jsonld/BZippedJsonLDTripleInputTest.java | 34 -
.../jsonld/DeflatedJsonLDQuadInputTest.java | 34 -
.../jsonld/DeflatedJsonLDTripleInputTest.java | 34 -
.../jsonld/GZippedJsonLDQuadInputTest.java | 34 -
.../jsonld/GZippedJsonLDTripleInputTest.java | 34 -
.../AbstractCompressedNQuadsInputFormatTests.java | 68 --
...tCompressedWholeFileNQuadsInputFormatTests.java | 75 --
.../compressed/nquads/BZipppedNQuadsInputTest.java | 38 -
.../nquads/BZipppedWholeFileNQuadsInputTest.java | 37 -
.../compressed/nquads/DeflatedNQuadsInputTest.java | 37 -
.../nquads/DeflatedWholeFileNQuadsInputTest.java | 37 -
.../compressed/nquads/GZippedNQuadsInputTest.java | 38 -
.../nquads/GZippedWholeFileNQuadsInputTest.java | 38 -
...tCompressedBlockedNTriplesInputFormatTests.java | 53 --
...AbstractCompressedNTriplesInputFormatTests.java | 68 --
...ompressedWholeFileNTriplesInputFormatTests.java | 75 --
.../ntriples/BZippedBlockedNTriplesInput.java | 37 -
.../ntriples/BZippedNTriplesInputTest.java | 38 -
.../BZippedWholeFileNTriplesInputTest.java | 38 -
.../ntriples/DeflatedBlockedNTriplesInput.java | 37 -
.../ntriples/DeflatedNTriplesInputTest.java | 38 -
.../DeflatedWholeFileNTriplesInputTest.java | 38 -
.../ntriples/GZippedBlockedNTriplesInput.java | 37 -
.../ntriples/GZippedNTriplesInputTest.java | 41 --
.../GZippedWholeFileNTriplesInputTest.java | 38 -
.../AbstractCompressedRdfJsonInputFormatTests.java | 74 --
.../rdfjson/BZippedRdfJsonInputTest.java | 37 -
.../rdfjson/DeflatedRdfJsonInputTest.java | 37 -
.../rdfjson/GZippedRdfJsonInputTest.java | 37 -
.../AbstractCompressedRdfXmlInputFormatTests.java | 75 --
.../compressed/rdfxml/BZippedRdfXmlInputTest.java | 37 -
.../compressed/rdfxml/DeflatedRdfXmlInputTest.java | 37 -
.../compressed/rdfxml/GZippedRdfXmlInputTest.java | 37 -
...stractCompressedThriftQuadInputFormatTests.java | 72 --
...ractCompressedThriftTripleInputFormatTests.java | 72 --
.../thrift/BZippedThriftQuadInputTest.java | 34 -
.../thrift/BZippedThriftTripleInputTest.java | 34 -
.../thrift/DeflatedThriftQuadInputTest.java | 34 -
.../thrift/DeflatedThriftTripleInputTest.java | 34 -
.../thrift/GZippedThriftQuadInputTest.java | 34 -
.../thrift/GZippedThriftTripleInputTest.java | 34 -
.../AbstractCompressedTriGInputFormatTests.java | 72 --
.../compressed/trig/BZippedTriGInputTest.java | 37 -
.../compressed/trig/DeflatedTriGInputTest.java | 37 -
.../compressed/trig/GZippedTriGInputTest.java | 37 -
.../AbstractCompressedTriXInputFormatTests.java | 72 --
.../compressed/trix/BZippedTriXInputTest.java | 35 -
.../compressed/trix/DeflatedTriXInputTest.java | 35 -
.../compressed/trix/GZippedTriXInputTest.java | 35 -
.../AbstractCompressedTurtleInputFormatTests.java | 75 --
.../compressed/turtle/BZippedTurtleInputTest.java | 37 -
.../compressed/turtle/DeflatedTurtleInputTest.java | 37 -
.../compressed/turtle/GZippedTurtleInputTest.java | 37 -
.../rdf/io/input/jsonld/JsonLDQuadInputTest.java | 65 --
.../rdf/io/input/jsonld/JsonLDTripleInputTest.java | 65 --
.../io/input/nquads/BlockedNQuadsInputTest.java | 51 --
.../rdf/io/input/nquads/NQuadsInputTest.java | 44 --
.../nquads/WholeFileNQuadsAsQuadsInputTest.java | 36 -
.../io/input/nquads/WholeFileNQuadsInputTest.java | 51 --
.../input/ntriples/BlockedNTriplesInputTest.java | 50 --
.../rdf/io/input/ntriples/NTriplesInputTest.java | 44 --
.../WholeFileNTriplesAsTriplesInputTest.java | 36 -
.../input/ntriples/WholeFileNTriplesInputTest.java | 52 --
.../input/rdfjson/RdfJsonAsTriplesInputTest.java | 36 -
.../rdf/io/input/rdfjson/RdfJsonInputTest.java | 51 --
.../io/input/rdfxml/RdfXmlAsTriplesInputTest.java | 36 -
.../rdf/io/input/rdfxml/RdfXmlInputTest.java | 51 --
.../rdf/io/input/thrift/ThriftQuadInputTest.java | 51 --
.../rdf/io/input/thrift/ThriftTripleInputTest.java | 51 --
.../rdf/io/input/trig/TriGAsQuadsInputTest.java | 36 -
.../hadoop/rdf/io/input/trig/TriGInputTest.java | 50 --
.../rdf/io/input/trix/TriXAsQuadsInputTest.java | 35 -
.../hadoop/rdf/io/input/trix/TriXInputTest.java | 50 --
.../io/input/turtle/TurtleAsTriplesInputTest.java | 36 -
.../rdf/io/input/turtle/TurtleInputTest.java | 81 --
.../util/AbstractTrackableInputStreamTests.java | 707 ------------------
.../rdf/io/input/util/BlockInputStreamTest.java | 238 ------
.../rdf/io/input/util/TrackedInputStreamTest.java | 36 -
.../output/AbstractNodeTupleOutputFormatTests.java | 254 -------
.../io/output/AbstractQuadOutputFormatTests.java | 50 --
.../io/output/AbstractTripleOutputFormatTests.java | 46 --
.../rdf/io/output/jsonld/JsonLdQuadOutputTest.java | 47 --
.../io/output/jsonld/JsonLdTripleOutputTest.java | 47 --
.../rdf/io/output/nquads/NQuadsOutputTest.java | 51 --
.../rdf/io/output/ntriples/NTriplesOutputTest.java | 51 --
.../rdf/io/output/rdfjson/RdfJsonOutputTest.java | 51 --
.../rdf/io/output/rdfxml/RdfXmlOutputTest.java | 51 --
.../rdf/io/output/thrift/ThriftQuadOutputTest.java | 48 --
.../io/output/thrift/ThriftTripleOutputTest.java | 48 --
.../rdf/io/output/trig/BatchedTriGOutputTest.java | 92 ---
.../rdf/io/output/trig/StreamedTriGOutputTest.java | 92 ---
.../io/output/trig/TriGBlankNodeOutputTests.java | 123 ----
.../hadoop/rdf/io/output/trix/TriXOutputTest.java | 47 --
.../io/output/turtle/BatchedTurtleOutputTest.java | 92 ---
.../io/output/turtle/StreamedTurtleOutputTest.java | 92 ---
.../output/turtle/TurtleBlankNodeOutputTests.java | 121 ---
.../rdf/io/registry/TestHadoopRdfIORegistry.java | 186 -----
.../src/test/resources/log4j.properties | 12 -
jena-elephas/jena-elephas-mapreduce/pom.xml | 115 ---
.../jena/hadoop/rdf/mapreduce/KeyMapper.java | 54 --
.../hadoop/rdf/mapreduce/KeyPlusNullMapper.java | 55 --
.../jena/hadoop/rdf/mapreduce/KeyReducer.java | 39 -
.../hadoop/rdf/mapreduce/NullPlusKeyMapper.java | 55 --
.../hadoop/rdf/mapreduce/NullPlusKeyReducer.java | 59 --
.../hadoop/rdf/mapreduce/NullPlusValueMapper.java | 55 --
.../hadoop/rdf/mapreduce/NullPlusValueReducer.java | 64 --
.../rdf/mapreduce/RdfMapReduceConstants.java | 67 --
.../jena/hadoop/rdf/mapreduce/SwapMapper.java | 55 --
.../jena/hadoop/rdf/mapreduce/SwapReducer.java | 43 --
.../hadoop/rdf/mapreduce/TextCountReducer.java | 49 --
.../jena/hadoop/rdf/mapreduce/ValueMapper.java | 54 --
.../hadoop/rdf/mapreduce/ValuePlusNullMapper.java | 55 --
.../jena/hadoop/rdf/mapreduce/ValueReducer.java | 44 --
...AbstractCharacteristicSetGeneratingReducer.java | 179 -----
.../characteristics/CharacteristicSetReducer.java | 68 --
.../QuadCharacteristicSetGeneratingReducer.java | 38 -
.../TripleCharacteristicSetGeneratingReducer.java | 39 -
.../count/AbstractNodeTupleNodeCountMapper.java | 66 --
.../rdf/mapreduce/count/NodeCountReducer.java | 50 --
.../rdf/mapreduce/count/QuadNodeCountMapper.java | 43 --
.../rdf/mapreduce/count/TripleNodeCountMapper.java | 41 --
.../count/datatypes/QuadDataTypeCountMapper.java | 55 --
.../count/datatypes/TripleDataTypeCountMapper.java | 55 --
.../AbstractNodeTupleNamespaceCountMapper.java | 134 ----
.../count/namespaces/QuadNamespaceCountMapper.java | 43 --
.../namespaces/TripleNamespaceCountMapper.java | 43 --
.../count/positional/QuadGraphCountMapper.java | 42 --
.../count/positional/QuadObjectCountMapper.java | 42 --
.../count/positional/QuadPredicateCountMapper.java | 42 --
.../count/positional/QuadSubjectCountMapper.java | 41 --
.../count/positional/TripleObjectCountMapper.java | 41 --
.../positional/TriplePredicateCountMapper.java | 42 --
.../count/positional/TripleSubjectCountMapper.java | 41 --
.../filter/AbstractNodeTupleFilterMapper.java | 72 --
.../mapreduce/filter/AbstractQuadFilterMapper.java | 34 -
.../filter/AbstractTripleFilterMapper.java | 34 -
.../mapreduce/filter/GroundQuadFilterMapper.java | 46 --
.../mapreduce/filter/GroundTripleFilterMapper.java | 46 --
.../mapreduce/filter/ValidQuadFilterMapper.java | 47 --
.../mapreduce/filter/ValidTripleFilterMapper.java | 46 --
.../AbstractQuadFilterByPositionMapper.java | 170 -----
.../AbstractTripleFilterByPositionMapper.java | 140 ----
.../positional/QuadFilterByGraphUriMapper.java | 75 --
.../positional/QuadFilterByObjectUriMapper.java | 75 --
.../positional/QuadFilterByPredicateMapper.java | 75 --
.../positional/QuadFilterBySubjectUriMapper.java | 75 --
.../positional/TripleFilterByObjectUriMapper.java | 70 --
.../TripleFilterByPredicateUriMapper.java | 70 --
.../positional/TripleFilterBySubjectUriMapper.java | 70 --
.../group/AbstractNodeTupleGroupingMapper.java | 60 --
.../group/AbstractQuadGroupingMapper.java | 50 --
.../group/AbstractTripleGroupingMapper.java | 43 --
.../mapreduce/group/QuadGroupByGraphMapper.java | 38 -
.../mapreduce/group/QuadGroupByObjectMapper.java | 38 -
.../group/QuadGroupByPredicateMapper.java | 38 -
.../mapreduce/group/QuadGroupBySubjectMapper.java | 38 -
.../mapreduce/group/TripleGroupByObjectMapper.java | 40 -
.../group/TripleGroupByPredicateMapper.java | 40 -
.../group/TripleGroupBySubjectMapper.java | 40 -
.../split/AbstractNodeTupleSplitToNodesMapper.java | 60 --
.../AbstractNodeTupleSplitWithNodesMapper.java | 60 --
.../mapreduce/split/QuadSplitToNodesMapper.java | 42 --
.../mapreduce/split/QuadSplitWithNodesMapper.java | 42 --
.../mapreduce/split/TripleSplitToNodesMapper.java | 40 -
.../split/TripleSplitWithNodesMapper.java | 40 -
.../transform/AbstractTriplesToQuadsMapper.java | 59 --
.../mapreduce/transform/QuadsToTriplesMapper.java | 46 --
.../transform/TriplesToQuadsBySubjectMapper.java | 40 -
.../TriplesToQuadsConstantGraphMapper.java | 75 --
.../rdf/mapreduce/AbstractMapReduceTests.java | 72 --
.../hadoop/rdf/mapreduce/AbstractMapperTests.java | 72 --
.../hadoop/rdf/mapreduce/TestDistinctTriples.java | 128 ----
...actCharacteristicSetGeneratingReducerTests.java | 183 -----
.../CharacteristicSetReducerTest.java | 195 -----
...ipleCharacteristicSetGeneratingReducerTest.java | 57 --
.../AbstractNodeTupleNodeCountReducedTests.java | 148 ----
.../count/AbstractNodeTupleNodeCountTests.java | 137 ----
.../count/QuadNodeCountMapReduceTest.java | 63 --
.../mapreduce/count/QuadNodeCountMapperTest.java | 57 --
.../count/TripleNodeCountMapReduceTest.java | 63 --
.../mapreduce/count/TripleNodeCountMapperTest.java | 56 --
.../filter/AbstractNodeTupleFilterTests.java | 145 ----
.../filter/AbstractQuadValidityFilterTests.java | 85 ---
.../filter/AbstractTripleValidityFilterTests.java | 72 --
.../TripleFilterByNoPredicateMapperTest.java | 49 --
.../filter/TripleFilterByPredicateMapperTest.java | 79 --
...ripleInvertedFilterByNoPredicateMapperTest.java | 54 --
.../TripleInvertedFilterByPredicateMapperTest.java | 86 ---
.../filter/ValidQuadFilterMapperTest.java | 39 -
.../filter/ValidTripleFilterMapperTest.java | 39 -
.../group/AbstractNodeTupleGroupingTests.java | 114 ---
.../mapreduce/group/AbstractQuadGroupingTests.java | 41 --
.../group/AbstractTripleGroupingTests.java | 39 -
.../group/QuadGroupByGraphMapperTest.java | 45 --
.../group/QuadGroupByObjectMapperTest.java | 45 --
.../group/QuadGroupByPredicateMapperTest.java | 45 --
.../group/QuadGroupBySubjectMapperTest.java | 45 --
.../group/TripleGroupByObjectMapperTest.java | 45 --
.../group/TripleGroupByPredicateMapperTest.java | 45 --
.../group/TripleGroupBySubjectMapperTest.java | 45 --
.../split/AbstractNodeTupleSplitToNodesTests.java | 115 ---
.../AbstractNodeTupleSplitWithNodesTests.java | 115 ---
.../split/AbstractQuadSplitToNodesTests.java | 51 --
.../split/AbstractQuadSplitWithNodesTests.java | 51 --
.../split/AbstractTripleSplitToNodesTests.java | 50 --
.../split/AbstractTripleSplitWithNodesTests.java | 50 --
.../split/QuadSplitToNodesMapperTest.java | 40 -
.../split/QuadSplitWithNodesMapperTest.java | 40 -
.../split/TripleSplitToNodesMapperTest.java | 40 -
.../split/TripleSplitWithNodesMapperTest.java | 40 -
.../transform/QuadsToTriplesMapperTest.java | 111 ---
.../TriplesToQuadsBySubjectMapperTest.java | 111 ---
.../TriplesToQuadsConstantGraphMapperTest.java | 111 ---
.../src/test/resources/log4j.properties | 12 -
jena-elephas/jena-elephas-stats/hadoop-job.xml | 46 --
jena-elephas/jena-elephas-stats/pom.xml | 109 ---
.../org/apache/jena/hadoop/rdf/stats/RdfStats.java | 428 -----------
.../jena/hadoop/rdf/stats/jobs/JobFactory.java | 820 ---------------------
jena-elephas/pom.xml | 147 ----
pom.xml | 9 -
392 files changed, 13 insertions(+), 28451 deletions(-)
diff --git a/jena-elephas/README.md b/jena-elephas/README.md
new file mode 100644
index 0000000..fbd905a
--- /dev/null
+++ b/jena-elephas/README.md
@@ -0,0 +1,13 @@
+Apache Jena Elephas
+===================
+
+The last release of jena-elephas was with Jena 3.17.0 on 2020-12-01.
+
+The code for this module has been removed.
+Source is available from git with tag "jena-3.17.0".
+
+The last git commit after 3.17.0 to jena-elephas was cb458d4c7b.
+This contains changes made in the progress to Jena 4.0.0.
+
+Retired modules can be incorporated back into Jena releases if there is
+sufficient activity to maintain the code over the long term.
diff --git a/jena-elephas/jena-elephas-common/pom.xml b/jena-elephas/jena-elephas-common/pom.xml
deleted file mode 100644
index b2ae3b4..0000000
--- a/jena-elephas/jena-elephas-common/pom.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.jena</groupId>
- <artifactId>jena-elephas</artifactId>
- <version>3.18.0-SNAPSHOT</version>
- </parent>
- <artifactId>jena-elephas-common</artifactId>
- <name>Apache Jena - Elephas - Common API</name>
- <description>Common code for RDF on Hadoop such as writable types for RDF primitives</description>
-
- <properties>
- <automatic.module.name>org.apache.jena.elephas.common</automatic.module.name>
- </properties>
-
- <!-- Note that versions are managed by parent POMs -->
- <dependencies>
- <!-- Hadoop Dependencies -->
- <!-- Note these will be provided on the Hadoop cluster
- hence the provided scope
- -->
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>provided</scope>
- </dependency>
-
- <!-- Jena dependencies -->
- <dependency>
- <groupId>org.apache.jena</groupId>
- <artifactId>jena-arq</artifactId>
- </dependency>
-
- <!-- Test Dependencies -->
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-javadoc-plugin</artifactId>
- <configuration>
- <version>true</version>
- <show>public</show>
- <quiet>true</quiet>
- <encoding>UTF-8</encoding>
- <windowtitle>Apache Jena Elephas - Common API</windowtitle>
- <doctitle>Apache Jena Elephas - Common API ${project.version}</doctitle>
- <bottom>Licenced under the Apache License, Version 2.0</bottom>
- <links>
- <link>https://jena.apache.org/documentation/javadoc/jena/</link>
- <link>https://jena.apache.org/documentation/javadoc/arq/</link>
- </links>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/AbstractNodeTupleWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/AbstractNodeTupleWritable.java
deleted file mode 100644
index c9750f2..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/AbstractNodeTupleWritable.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.sparql.util.NodeUtils ;
-
-/**
- * A abstract general purpose writable where the actual class represented is
- * composed of a number of {@link Node} instances
- * <p>
- * The binary encoding of this base implementation is just a variable integer
- * indicating the number of nodes present followed by the binary encodings of
- * the {@link NodeWritable} instances. Derived implementations may wish to
- * override the {@link #readFields(DataInput)} and {@link #write(DataOutput)}
- * methods in order to use more specialised encodings.
- * </p>
- *
- * @param <T>
- * Tuple type
- */
-public abstract class AbstractNodeTupleWritable<T> implements WritableComparable<AbstractNodeTupleWritable<T>> {
-
- private T tuple;
-
- /**
- * Creates a new empty instance
- */
- protected AbstractNodeTupleWritable() {
- this(null);
- }
-
- /**
- * Creates a new instance with the given value
- *
- * @param tuple
- * Tuple value
- */
- protected AbstractNodeTupleWritable(T tuple) {
- this.tuple = tuple;
- }
-
- /**
- * Gets the tuple
- *
- * @return Tuple
- */
- public T get() {
- return this.tuple;
- }
-
- /**
- * Sets the tuple
- *
- * @param tuple
- * Tuple
- */
- public void set(T tuple) {
- this.tuple = tuple;
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- // Determine how many nodes
- int size = WritableUtils.readVInt(input);
- Node[] ns = new Node[size];
-
- NodeWritable nw = new NodeWritable();
- for (int i = 0; i < ns.length; i++) {
- nw.readFields(input);
- ns[i] = nw.get();
- }
-
- // Load the tuple
- this.tuple = this.createTuple(ns);
- }
-
- /**
- * Creates the actual tuple type from an array of nodes
- *
- * @param ns
- * Nodes
- * @return Tuple
- */
- protected abstract T createTuple(Node[] ns);
-
- @Override
- public void write(DataOutput output) throws IOException {
- // Determine how many nodes
- Node[] ns = this.createNodes(this.tuple);
- WritableUtils.writeVInt(output, ns.length);
-
- // Write out nodes
- NodeWritable nw = new NodeWritable();
- for (int i = 0; i < ns.length; i++) {
- nw.set(ns[i]);
- nw.write(output);
- }
- }
-
- /**
- * Sets the tuple value
- * <p>
- * Intended only for internal use i.e. when a derived implementation
- * overrides {@link #readFields(DataInput)} and needs to set the tuple value
- * directly i.e. when a derived implementation is using a custom encoding
- * scheme
- * </p>
- *
- * @param tuple
- * Tuple
- */
- protected final void setInternal(T tuple) {
- this.tuple = tuple;
- }
-
- /**
- * Converts the actual tuple type into an array of nodes
- *
- * @param tuple
- * Tuples
- * @return Nodes
- */
- protected abstract Node[] createNodes(T tuple);
-
- /**
- * Compares instances node by node
- * <p>
- * Derived implementations may wish to override this and substitute native
- * tuple based comparisons
- * </p>
- *
- * @param other
- * Instance to compare with
- */
- @Override
- public int compareTo(AbstractNodeTupleWritable<T> other) {
- Node[] ns = this.createNodes(this.tuple);
- Node[] otherNs = this.createNodes(other.tuple);
-
- if (ns.length < otherNs.length) {
- return -1;
- } else if (ns.length > otherNs.length) {
- return 1;
- }
- // Compare node by node
- for (int i = 0; i < ns.length; i++) {
- int c = NodeUtils.compareRDFTerms(ns[i], otherNs[i]);
- if (c != 0)
- return c;
- }
- return 0;
- }
-
- @Override
- public String toString() {
- return this.get().toString();
- }
-
- @Override
- public int hashCode() {
- return this.get().hashCode();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof AbstractNodeTupleWritable))
- return false;
- return this.compareTo((AbstractNodeTupleWritable<T>) other) == 0;
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicSetWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicSetWritable.java
deleted file mode 100644
index 39599fa..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicSetWritable.java
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.NodeFactory ;
-
-/**
- * Represents a characteristic set which is comprised of a count of nodes for
- * which the characteristic is applicable and a set of characteristics which
- * represents the number of usages of predicates with those nodes
- *
- *
- *
- */
-public class CharacteristicSetWritable implements WritableComparable<CharacteristicSetWritable> {
-
- private Map<NodeWritable, CharacteristicWritable> characteristics = new TreeMap<NodeWritable, CharacteristicWritable>();
- private LongWritable count = new LongWritable();
-
- /**
- * Creates a new empty characteristic set with the default count of 1
- */
- public CharacteristicSetWritable() {
- this(1);
- }
-
- /**
- * Creates a new characteristic set with the default count of 1 and the
- * given characteristics
- *
- * @param characteristics
- * Characteristics
- */
- public CharacteristicSetWritable(CharacteristicWritable... characteristics) {
- this(1, characteristics);
- }
-
- /**
- * Creates an empty characteristic set with the given count
- *
- * @param count
- * Count
- */
- public CharacteristicSetWritable(long count) {
- this(count, new CharacteristicWritable[0]);
- }
-
- /**
- * Creates a new characteristic set
- *
- * @param count
- * Count
- * @param characteristics
- * Characteristics
- */
- public CharacteristicSetWritable(long count, CharacteristicWritable... characteristics) {
- this.count.set(count);
- for (CharacteristicWritable characteristic : characteristics) {
- this.characteristics.put(characteristic.getNode(), characteristic);
- }
- }
-
- /**
- * Creates a new instance and reads its data from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static CharacteristicSetWritable read(DataInput input) throws IOException {
- CharacteristicSetWritable set = new CharacteristicSetWritable();
- set.readFields(input);
- return set;
- }
-
- /**
- * Gets the count
- *
- * @return Count
- */
- public LongWritable getCount() {
- return this.count;
- }
-
- /**
- * Gets the characteristics
- *
- * @return Characteristics
- */
- public Iterator<CharacteristicWritable> getCharacteristics() {
- return this.characteristics.values().iterator();
- }
-
- /**
- * Gets the size of the characteristic set
- *
- * @return Size
- */
- public int size() {
- return this.characteristics.size();
- }
-
- /**
- * Adds a characteristic to the set merging it into the appropriate existing
- * characteristic if applicable
- *
- * @param characteristic
- * Characteristics
- */
- public void add(CharacteristicWritable characteristic) {
- if (this.characteristics.containsKey(characteristic.getNode())) {
- this.characteristics.get(characteristic.getNode()).increment(characteristic.getCount().get());
- } else {
- this.characteristics.put(characteristic.getNode(), characteristic);
- }
- }
-
- /**
- * Adds some characteristics to the set merging them with the appropriate
- * existing characteristics if applicable
- *
- * @param characteristics
- */
- public void add(CharacteristicWritable... characteristics) {
- for (CharacteristicWritable characteristic : characteristics) {
- this.add(characteristic);
- }
- }
-
- /**
- * Adds the contents of the other characteristic set to this characteristic
- * set
- *
- * @param set
- * Characteristic set
- */
- public void add(CharacteristicSetWritable set) {
- this.increment(set.getCount().get());
- Iterator<CharacteristicWritable> iter = set.getCharacteristics();
- while (iter.hasNext()) {
- this.add(iter.next());
- }
- }
-
- /**
- * Gets whether the set contains a characteristic for the given predicate
- *
- * @param uri
- * Predicate URI
- * @return True if contained in the set, false otherwise
- */
- public boolean hasCharacteristic(String uri) {
- return this.hasCharacteristic(NodeFactory.createURI(uri));
- }
-
- /**
- * Gets whether the set contains a characteristic for the given predicate
- *
- * @param n
- * Predicate
- * @return True if contained in the set, false otherwise
- */
- public boolean hasCharacteristic(Node n) {
- return this.hasCharacteristic(new NodeWritable(n));
- }
-
- /**
- * Gets whether the set contains a characteristic for the given predicate
- *
- * @param n
- * Predicate
- * @return True if contained in the set, false otherwise
- */
- public boolean hasCharacteristic(NodeWritable n) {
- return this.characteristics.containsKey(n);
- }
-
- /**
- * Increments the count by the given increment
- *
- * @param l
- * Increment
- */
- public void increment(long l) {
- this.count.set(this.count.get() + l);
- }
-
- @Override
- public void write(DataOutput output) throws IOException {
- // Write size, then count, then characteristics
- WritableUtils.writeVInt(output, this.characteristics.size());
- this.count.write(output);
- for (CharacteristicWritable characteristic : this.characteristics.values()) {
- characteristic.write(output);
- }
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- // Read size, then count, then characteristics
- int size = WritableUtils.readVInt(input);
- this.count.readFields(input);
- this.characteristics.clear();
- for (int i = 0; i < size; i++) {
- CharacteristicWritable cw = CharacteristicWritable.read(input);
- this.characteristics.put(cw.getNode(), cw);
- }
- }
-
- @Override
- public int compareTo(CharacteristicSetWritable cs) {
- int size = this.characteristics.size();
- int otherSize = cs.characteristics.size();
- if (size < otherSize) {
- return -1;
- } else if (size > otherSize) {
- return 1;
- } else {
- // Compare characteristics in turn
- Iterator<CharacteristicWritable> iter = this.getCharacteristics();
- Iterator<CharacteristicWritable> otherIter = cs.getCharacteristics();
-
- int compare = 0;
- while (iter.hasNext()) {
- CharacteristicWritable c = iter.next();
- CharacteristicWritable otherC = otherIter.next();
- compare = c.compareTo(otherC);
- if (compare != 0)
- return compare;
- }
- return compare;
- }
- }
-
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof CharacteristicSetWritable))
- return false;
- return this.compareTo((CharacteristicSetWritable) other) == 0;
- }
-
- @Override
- public int hashCode() {
- // Build a hash code from characteristics
- if (this.characteristics.size() == 0)
- return 0;
- Iterator<CharacteristicWritable> iter = this.getCharacteristics();
- int hash = 17;
- while (iter.hasNext()) {
- hash = hash * 31 + iter.next().hashCode();
- }
- return hash;
- }
-
- @Override
- public String toString() {
- StringBuilder builder = new StringBuilder();
- builder.append("{ ");
- builder.append(this.count.get());
- Iterator<CharacteristicWritable> iter = this.getCharacteristics();
- while (iter.hasNext()) {
- builder.append(" , ");
- builder.append(iter.next().toString());
- }
- builder.append(" }");
- return builder.toString();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicWritable.java
deleted file mode 100644
index 9fc8a08..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/CharacteristicWritable.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.jena.graph.Node ;
-
-/**
- * Represents a characteristic for a single node and contains the node and a
- * count associated with that node
- * <p>
- * Note that characteristics are compared based upon only the nodes and not
- * their counts
- * </p>
- *
- *
- *
- */
-public class CharacteristicWritable implements WritableComparable<CharacteristicWritable> {
-
- private NodeWritable node = new NodeWritable();
- private LongWritable count = new LongWritable();
-
- /**
- * Creates an empty characteristic writable
- */
- public CharacteristicWritable() {
- this(null);
- }
-
- /**
- * Creates a characteristic writable with the given node and the default
- * count of 1
- *
- * @param n
- * Node
- */
- public CharacteristicWritable(Node n) {
- this(n, 1);
- }
-
- /**
- * Creates a characteristic writable with the given node and count
- *
- * @param n
- * Node
- * @param count
- * Count
- */
- public CharacteristicWritable(Node n, long count) {
- this.node.set(n);
- this.count.set(count);
- }
-
- /**
- * Creates a new instance and reads in its data from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static CharacteristicWritable read(DataInput input) throws IOException {
- CharacteristicWritable cw = new CharacteristicWritable();
- cw.readFields(input);
- return cw;
- }
-
- /**
- * Gets the node
- *
- * @return Node
- */
- public NodeWritable getNode() {
- return this.node;
- }
-
- /**
- * Gets the count
- *
- * @return Count
- */
- public LongWritable getCount() {
- return this.count;
- }
-
- /**
- * Increments the count by 1
- */
- public void increment() {
- this.increment(1);
- }
-
- /**
- * Increments the count by the given value
- *
- * @param l
- * Value to increment by
- */
- public void increment(long l) {
- this.count.set(this.count.get() + l);
- }
-
- @Override
- public void write(DataOutput output) throws IOException {
- this.node.write(output);
- this.count.write(output);
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- this.node.readFields(input);
- this.count.readFields(input);
- }
-
- @Override
- public int compareTo(CharacteristicWritable o) {
- return this.node.compareTo(o.node);
- }
-
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof CharacteristicWritable))
- return false;
- return this.compareTo((CharacteristicWritable) other) == 0;
- }
-
- @Override
- public int hashCode() {
- return this.node.hashCode();
- }
-
- @Override
- public String toString() {
- return "(" + this.node.toString() + ", " + this.count.toString() + ")";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeTupleWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeTupleWritable.java
deleted file mode 100644
index d65860b..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeTupleWritable.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.IOException;
-
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.atlas.lib.tuple.TupleFactory ;
-import org.apache.jena.graph.Node ;
-
-/**
- * A writable RDF tuple
- * <p>
- * Unlike the more specific {@link TripleWritable} and {@link QuadWritable} this
- * class allows for arbitrary length tuples and does not restrict tuples to
- * being of uniform size.
- * </p>
- *
- *
- *
- */
-public class NodeTupleWritable extends AbstractNodeTupleWritable<Tuple<Node>> {
-
- /**
- * Creates a new empty instance
- */
- public NodeTupleWritable() {
- this(null);
- }
-
- /**
- * Creates a new instance with the given value
- *
- * @param tuple
- * Tuple
- */
- public NodeTupleWritable(Tuple<Node> tuple) {
- super(tuple);
- }
-
- /**
- * Creates a new instance from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static NodeTupleWritable read(DataInput input) throws IOException {
- NodeTupleWritable t = new NodeTupleWritable();
- t.readFields(input);
- return t;
- }
-
- @Override
- protected Tuple<Node> createTuple(Node[] ns) {
- return TupleFactory.create(ns);
- }
-
- @Override
- protected Node[] createNodes(Tuple<Node> tuple) {
- Node n[] = new Node[tuple.len()] ;
- tuple.copyInto(n);
- return n ;
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeWritable.java
deleted file mode 100644
index 7b21b26..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/NodeWritable.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
-import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
-import org.apache.jena.riot.thrift.TRDF;
-import org.apache.jena.riot.thrift.ThriftConvert;
-import org.apache.jena.riot.thrift.wire.RDF_Term;
-import org.apache.jena.sparql.util.NodeUtils ;
-import org.apache.thrift.TException;
-
-/**
- * A writable for {@link Node} instances
- * <p>
- * This uses <a
- * href="http://afs.github.io/rdf-thrift/rdf-binary-thrift.html">RDF Thrift</a>
- * for the binary encoding of terms. The in-memory storage for this type is both
- * a {@link Node} and a {@link RDF_Term} with lazy conversion between the two
- * forms as necessary.
- * </p>
- */
-public class NodeWritable implements WritableComparable<NodeWritable> {
-
- static {
- WritableComparator.define(NodeWritable.class, new SimpleBinaryComparator());
- }
-
- private Node node;
- private RDF_Term term = new RDF_Term();
-
- /**
- * Creates an empty writable
- */
- public NodeWritable() {
- this(null);
- }
-
- /**
- * Creates a new instance from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static NodeWritable read(DataInput input) throws IOException {
- NodeWritable nw = new NodeWritable();
- nw.readFields(input);
- return nw;
- }
-
- /**
- * Creates a new writable with the given value
- *
- * @param n
- * Node
- */
- public NodeWritable(Node n) {
- this.set(n);
- }
-
- /**
- * Gets the node
- *
- * @return Node
- */
- public Node get() {
- // We may not have yet loaded the node
- if (this.node == null) {
- // If term is set to undefined then node is supposed to be null
- if (this.term.isSet() && !this.term.isSetUndefined()) {
- this.node = ThriftConvert.convert(this.term);
- }
- }
- return this.node;
- }
-
- /**
- * Sets the node
- *
- * @param n
- * Node
- */
- public void set(Node n) {
- this.node = n;
- // Clear the term for now
- // We only convert the Node to a term as and when we want to write it
- // out in order to not waste effort if the value is never written out
- this.term.clear();
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- // Clear previous value
- this.node = null;
- this.term.clear();
-
- // Read in the new value
- int termLength = input.readInt();
- byte[] buffer = new byte[termLength];
- input.readFully(buffer);
- try {
- ThriftConverter.fromBytes(buffer, this.term);
- } catch (TException e) {
- throw new IOException(e);
- }
-
- // Note that we don't convert it back into a Node at this time
- }
-
- @Override
- public void write(DataOutput output) throws IOException {
- // May not yet have prepared the Thrift term
- if (!this.term.isSet()) {
- if (this.node == null) {
- this.term.setUndefined(TRDF.UNDEF);
- } else {
- ThriftConvert.toThrift(this.node, null, this.term, false);
- }
- }
-
- // Write out the Thrift term
- byte[] buffer;
- try {
- buffer = ThriftConverter.toBytes(this.term);
- } catch (TException e) {
- throw new IOException(e);
- }
- output.writeInt(buffer.length);
- output.write(buffer);
- }
-
- @Override
- public int compareTo(NodeWritable other) {
- // Use get() rather than accessing the field directly because the node
- // field is lazily instantiated from the Thrift term
- return NodeUtils.compareRDFTerms(this.get(), other.get());
- }
-
- @Override
- public String toString() {
- // Use get() rather than accessing the field directly because the node
- // field is lazily instantiated from the Thrift term
- Node n = this.get();
- if (n == null)
- return "";
- return n.toString();
- }
-
- @Override
- public int hashCode() {
- // Use get() rather than accessing the field directly because the node
- // field is lazily instantiated from the Thrift term
- Node n = this.get();
- return n != null ? this.get().hashCode() : 0;
- }
-
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof NodeWritable))
- return false;
- return this.compareTo((NodeWritable) other) == 0;
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
deleted file mode 100644
index 31f9645..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
-import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
-import org.apache.jena.riot.thrift.ThriftConvert;
-import org.apache.jena.riot.thrift.wire.RDF_Quad;
-import org.apache.jena.sparql.core.Quad ;
-import org.apache.thrift.TException;
-
-/**
- * A writable quad
- */
-public class QuadWritable extends AbstractNodeTupleWritable<Quad> {
-
- static {
- WritableComparator.define(QuadWritable.class, new SimpleBinaryComparator());
- }
-
- private RDF_Quad quad = new RDF_Quad();
-
- /**
- * Creates a new empty instance
- */
- public QuadWritable() {
- this(null);
- }
-
- /**
- * Creates a new instance with the given value
- *
- * @param q
- * Quad
- */
- public QuadWritable(Quad q) {
- super(q);
- }
-
- /**
- * Creates a new instance from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static QuadWritable read(DataInput input) throws IOException {
- QuadWritable q = new QuadWritable();
- q.readFields(input);
- return q;
- }
-
- @Override
- public void set(Quad tuple) {
- super.set(tuple);
- this.quad.clear();
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- this.quad.clear();
- int tripleLength = input.readInt();
- byte[] buffer = new byte[tripleLength];
- input.readFully(buffer);
- try {
- ThriftConverter.fromBytes(buffer, this.quad);
- } catch (TException e) {
- throw new IOException(e);
- }
- this.setInternal(new Quad(ThriftConvert.convert(this.quad.getG()), ThriftConvert.convert(this.quad.getS()),
- ThriftConvert.convert(this.quad.getP()), ThriftConvert.convert(this.quad.getO())));
- }
-
- @Override
- public void write(DataOutput output) throws IOException {
- if (this.get() == null)
- throw new IOException(
- "Null quads cannot be written using this class, consider using NodeTupleWritable instead");
-
- // May not have yet prepared the Thrift triple
- if (!this.quad.isSetS()) {
- Quad tuple = this.get();
- this.quad.setG(ThriftConvert.convert(tuple.getGraph(), false));
- this.quad.setS(ThriftConvert.convert(tuple.getSubject(), false));
- this.quad.setP(ThriftConvert.convert(tuple.getPredicate(), false));
- this.quad.setO(ThriftConvert.convert(tuple.getObject(), false));
- }
-
- byte[] buffer;
- try {
- buffer = ThriftConverter.toBytes(this.quad);
- } catch (TException e) {
- throw new IOException(e);
- }
- output.writeInt(buffer.length);
- output.write(buffer);
- }
-
- @Override
- protected Quad createTuple(Node[] ns) {
- if (ns.length != 4)
- throw new IllegalArgumentException(String.format(
- "Incorrect number of nodes to form a quad - got %d but expected 4", ns.length));
- return new Quad(ns[0], ns[1], ns[2], ns[3]);
- }
-
- @Override
- protected Node[] createNodes(Quad tuple) {
- return new Node[] { tuple.getGraph(), tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
- }
-
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
deleted file mode 100644
index ba81b66..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
-import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
-import org.apache.jena.riot.thrift.ThriftConvert;
-import org.apache.jena.riot.thrift.wire.RDF_Triple;
-import org.apache.thrift.TException;
-
-/**
- * A writable triple
- *
- *
- *
- */
-public class TripleWritable extends AbstractNodeTupleWritable<Triple> {
-
- static {
- WritableComparator.define(TripleWritable.class, new SimpleBinaryComparator());
- }
-
- private RDF_Triple triple = new RDF_Triple();
-
- /**
- * Creates a new instance using the default NTriples node formatter
- */
- public TripleWritable() {
- this(null);
- }
-
- /**
- * Creates a new instance with a given value that uses a specific node
- * formatter
- *
- * @param t
- * Triple
- */
- public TripleWritable(Triple t) {
- super(t);
- }
-
- /**
- * Creates a new instance from the given input
- *
- * @param input
- * Input
- * @return New instance
- * @throws IOException
- */
- public static TripleWritable read(DataInput input) throws IOException {
- TripleWritable t = new TripleWritable();
- t.readFields(input);
- return t;
- }
-
- @Override
- public void set(Triple tuple) {
- super.set(tuple);
- this.triple.clear();
- }
-
- @Override
- public void readFields(DataInput input) throws IOException {
- this.triple.clear();
- int tripleLength = input.readInt();
- byte[] buffer = new byte[tripleLength];
- input.readFully(buffer);
- try {
- ThriftConverter.fromBytes(buffer, this.triple);
- } catch (TException e) {
- throw new IOException(e);
- }
- this.setInternal(new Triple(ThriftConvert.convert(this.triple.getS()),
- ThriftConvert.convert(this.triple.getP()), ThriftConvert.convert(this.triple.getO())));
- }
-
- @Override
- public void write(DataOutput output) throws IOException {
- if (this.get() == null)
- throw new IOException(
- "Null triples cannot be written using this class, consider using NodeTupleWritable instead");
-
- // May not have yet prepared the Thrift triple
- if (!this.triple.isSetS()) {
- Triple tuple = this.get();
- this.triple.setS(ThriftConvert.convert(tuple.getSubject(), false));
- this.triple.setP(ThriftConvert.convert(tuple.getPredicate(), false));
- this.triple.setO(ThriftConvert.convert(tuple.getObject(), false));
- }
-
- byte[] buffer;
- try {
- buffer = ThriftConverter.toBytes(this.triple);
- } catch (TException e) {
- throw new IOException(e);
- }
- output.writeInt(buffer.length);
- output.write(buffer);
- }
-
- @Override
- protected Triple createTuple(Node[] ns) {
- if (ns.length != 3)
- throw new IllegalArgumentException(String.format(
- "Incorrect number of nodes to form a triple - got %d but expected 3", ns.length));
- return new Triple(ns[0], ns[1], ns[2]);
- }
-
- @Override
- protected Node[] createNodes(Triple tuple) {
- return new Node[] { tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
deleted file mode 100644
index cc2924d..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types.comparators;
-
-import org.apache.hadoop.io.WritableComparator;
-
-/**
- * A general purpose comparator that may be used with any types which can be
- * compared directly on their binary encodings
- */
-public class SimpleBinaryComparator extends WritableComparator {
-
- @Override
- public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
- return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/converters/ThriftConverter.java b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/converters/ThriftConverter.java
deleted file mode 100644
index 0675afc..0000000
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/converters/ThriftConverter.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.types.converters;
-
-import java.io.ByteArrayOutputStream;
-
-import org.apache.jena.riot.thrift.wire.RDF_Quad;
-import org.apache.jena.riot.thrift.wire.RDF_Term;
-import org.apache.jena.riot.thrift.wire.RDF_Triple;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TIOStreamTransport;
-import org.apache.thrift.transport.TMemoryInputTransport;
-import org.apache.thrift.transport.TTransport;
-
-/**
- * Helper for converting between the binary representation of Nodes, Triples and
- * Quads and their Jena API equivalents
- *
- */
-public class ThriftConverter {
-
- private static ThreadLocal<TMemoryInputTransport> inputTransports = new ThreadLocal<>();
- private static ThreadLocal<TProtocol> inputProtocols = new ThreadLocal<>();
-
- private static ThreadLocal<ByteArrayOutputStream> outputStreams = new ThreadLocal<>();
- private static ThreadLocal<TTransport> outputTransports = new ThreadLocal<>();
- private static ThreadLocal<TProtocol> outputProtocols = new ThreadLocal<>();
-
- private static TMemoryInputTransport getInputTransport() {
- TMemoryInputTransport transport = inputTransports.get();
- if (transport != null)
- return transport;
-
- transport = new TMemoryInputTransport();
- inputTransports.set(transport);
- return transport;
- }
-
- private static TProtocol getInputProtocol() {
- TProtocol protocol = inputProtocols.get();
- if (protocol != null)
- return protocol;
-
- protocol = new TCompactProtocol(getInputTransport());
- inputProtocols.set(protocol);
- return protocol;
- }
-
- private static ByteArrayOutputStream getOutputStream() {
- ByteArrayOutputStream output = outputStreams.get();
- if (output != null)
- return output;
-
- output = new ByteArrayOutputStream();
- outputStreams.set(output);
- return output;
- }
-
- private static TTransport getOutputTransport() {
- TTransport transport = outputTransports.get();
- if (transport != null)
- return transport;
-
- transport = new TIOStreamTransport(getOutputStream());
- outputTransports.set(transport);
- return transport;
- }
-
- private static TProtocol getOutputProtocol() {
- TProtocol protocol = outputProtocols.get();
- if (protocol != null)
- return protocol;
-
- protocol = new TCompactProtocol(getOutputTransport());
- outputProtocols.set(protocol);
- return protocol;
- }
-
- public static byte[] toBytes(RDF_Term term) throws TException {
- ByteArrayOutputStream output = getOutputStream();
- output.reset();
-
- TProtocol protocol = getOutputProtocol();
- term.write(protocol);
-
- return output.toByteArray();
- }
-
- public static void fromBytes(byte[] bs, RDF_Term term) throws TException {
- TMemoryInputTransport transport = getInputTransport();
- transport.reset(bs);
- TProtocol protocol = getInputProtocol();
- term.read(protocol);
- }
-
- public static void fromBytes(byte[] buffer, RDF_Triple triple) throws TException {
- TMemoryInputTransport transport = getInputTransport();
- transport.reset(buffer);
- TProtocol protocol = getInputProtocol();
- triple.read(protocol);
- }
-
- public static byte[] toBytes(RDF_Triple triple) throws TException {
- ByteArrayOutputStream output = getOutputStream();
- output.reset();
-
- TProtocol protocol = getOutputProtocol();
- triple.write(protocol);
-
- return output.toByteArray();
- }
-
- public static void fromBytes(byte[] buffer, RDF_Quad quad) throws TException {
- TMemoryInputTransport transport = getInputTransport();
- transport.reset(buffer);
- TProtocol protocol = getInputProtocol();
- quad.read(protocol);
- }
-
- public static byte[] toBytes(RDF_Quad quad) throws TException {
- ByteArrayOutputStream output = getOutputStream();
- output.reset();
-
- TProtocol protocol = getOutputProtocol();
- quad.write(protocol);
-
- return output.toByteArray();
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java b/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
deleted file mode 100644
index 5edff0f..0000000
--- a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.types;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.NodeFactory ;
-import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
-import org.apache.jena.hadoop.rdf.types.CharacteristicWritable;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * Tests for {@link CharacteristicWritable} and
- * {@link CharacteristicSetWritable}
- *
- *
- *
- */
-public class CharacteristicTests {
-
- /**
- * Checks whether a writable round trips successfully
- *
- * @param cw
- * Characteristic writable
- * @throws IOException
- */
- private void checkRoundTrip(CharacteristicWritable cw) throws IOException {
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- DataOutputStream output = new DataOutputStream(outputStream);
- cw.write(output);
-
- ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
- DataInputStream input = new DataInputStream(inputStream);
- CharacteristicWritable actual = CharacteristicWritable.read(input);
- Assert.assertEquals(cw, actual);
- }
-
- /**
- * Tests characteristic round tripping
- *
- * @throws IOException
- */
- @Test
- public void characteristic_writable_01() throws IOException {
- Node n = NodeFactory.createURI("http://example.org");
- CharacteristicWritable expected = new CharacteristicWritable(n);
- Assert.assertEquals(1, expected.getCount().get());
-
- this.checkRoundTrip(expected);
- }
-
- /**
- * Tests characteristic properties
- *
- * @throws IOException
- */
- @Test
- public void characteristic_writable_02() throws IOException {
- Node n = NodeFactory.createURI("http://example.org");
- CharacteristicWritable cw1 = new CharacteristicWritable(n);
- CharacteristicWritable cw2 = new CharacteristicWritable(n, 100);
- this.checkRoundTrip(cw1);
- this.checkRoundTrip(cw2);
-
- // Should still be equal since equality is only on the node not the
- // count
- Assert.assertEquals(cw1, cw2);
- }
-
- /**
- * Tests characteristic properties
- *
- * @throws IOException
- */
- @Test
- public void characteristic_writable_03() throws IOException {
- CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
- CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
- this.checkRoundTrip(cw1);
- this.checkRoundTrip(cw2);
-
- // Should not be equal as different nodes
- Assert.assertNotEquals(cw1, cw2);
- }
-
- /**
- * Checks that a writable round trips
- *
- * @param set
- * Characteristic set
- * @throws IOException
- */
- private void checkRoundTrip(CharacteristicSetWritable set) throws IOException {
- // Test round trip
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- DataOutputStream output = new DataOutputStream(outputStream);
- set.write(output);
-
- ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
- DataInputStream input = new DataInputStream(inputStream);
- CharacteristicSetWritable actual = CharacteristicSetWritable.read(input);
- Assert.assertEquals(set, actual);
- }
-
- /**
- * Checks a characteristic set
- *
- * @param set
- * Set
- * @param expectedItems
- * Expected number of characteristics
- * @param expectedCounts
- * Expected counts for characteristics
- */
- protected final void checkCharacteristicSet(CharacteristicSetWritable set, int expectedItems, long[] expectedCounts) {
- Assert.assertEquals(expectedItems, set.size());
- Assert.assertEquals(expectedItems, expectedCounts.length);
- Iterator<CharacteristicWritable> iter = set.getCharacteristics();
- int i = 0;
- while (iter.hasNext()) {
- CharacteristicWritable cw = iter.next();
- Assert.assertEquals(expectedCounts[i], cw.getCount().get());
- i++;
- }
- }
-
- /**
- * Tests characteristic sets
- *
- * @throws IOException
- */
- @Test
- public void characteristic_set_writable_01() throws IOException {
- CharacteristicSetWritable set = new CharacteristicSetWritable();
-
- // Add some characteristics
- CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
- CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
- set.add(cw1);
- set.add(cw2);
- this.checkCharacteristicSet(set, 2, new long[] { 1, 1 });
- this.checkRoundTrip(set);
- }
-
- /**
- * Tests characteristic sets
- *
- * @throws IOException
- */
- @Test
- public void characteristic_set_writable_02() throws IOException {
- CharacteristicSetWritable set = new CharacteristicSetWritable();
-
- // Add some characteristics
- CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
- CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"), 2);
- set.add(cw1);
- set.add(cw2);
- this.checkCharacteristicSet(set, 1, new long[] { 3 });
- this.checkRoundTrip(set);
- }
-
- /**
- * Tests characteristic sets
- *
- * @throws IOException
- */
- @Test
- public void characteristic_set_writable_03() throws IOException {
- CharacteristicSetWritable set1 = new CharacteristicSetWritable();
- CharacteristicSetWritable set2 = new CharacteristicSetWritable();
-
- // Add some characteristics
- CharacteristicWritable cw1 = new CharacteristicWritable(NodeFactory.createURI("http://example.org"));
- CharacteristicWritable cw2 = new CharacteristicWritable(NodeFactory.createURI("http://example.org/other"));
- set1.add(cw1);
- set2.add(cw2);
- this.checkCharacteristicSet(set1, 1, new long[] { 1 });
- this.checkCharacteristicSet(set2, 1, new long[] { 1 });
- this.checkRoundTrip(set1);
- this.checkRoundTrip(set2);
-
- Assert.assertNotEquals(set1, set2);
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java b/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
deleted file mode 100644
index ab04d43..0000000
--- a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.types;
-
-import java.io.* ;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import static org.apache.jena.atlas.lib.tuple.TupleFactory.tuple ;
-import org.apache.jena.datatypes.xsd.XSDDatatype ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.NodeFactory ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.NodeTupleWritable;
-import org.apache.jena.hadoop.rdf.types.NodeWritable;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.sparql.core.Quad ;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Tests for the various RDF types defined by the
- * {@link org.apache.jena.hadoop.rdf.types} package
- *
- *
- *
- */
-public class RdfTypesTest {
-
- private static final Logger LOG = LoggerFactory.getLogger(RdfTypesTest.class);
-
- private ByteArrayOutputStream outputStream;
- private ByteArrayInputStream inputStream;
-
- /**
- * Prepare for output
- *
- * @return Data output
- */
- private DataOutput prepareOutput() {
- this.outputStream = new ByteArrayOutputStream();
- return new DataOutputStream(this.outputStream);
- }
-
- /**
- * Prepare for input from the previously written output
- *
- * @return Data Input
- */
- private DataInput prepareInput() {
- this.inputStream = new ByteArrayInputStream(this.outputStream.toByteArray());
- return new DataInputStream(this.inputStream);
- }
-
- /**
- * Prepare for input from the given data
- *
- * @param data
- * Data
- * @return Data Input
- */
- private DataInput prepareInput(byte[] data) {
- this.inputStream = new ByteArrayInputStream(data);
- return new DataInputStream(this.inputStream);
- }
-
- @SuppressWarnings({ "unchecked", "rawtypes" })
- private <T extends WritableComparable> void testWriteRead(T writable, T expected) throws IOException, InstantiationException, IllegalAccessException,
- ClassNotFoundException {
- // Write out data
- DataOutput output = this.prepareOutput();
- writable.write(output);
-
- // Read back in data
- DataInput input = this.prepareInput();
- T actual = (T) Class.forName(writable.getClass().getName()).newInstance();
- actual.readFields(input);
-
- LOG.debug("Original = " + writable.toString());
- LOG.debug("Round Tripped = " + actual.toString());
-
- // Check equivalent
- Assert.assertEquals(0, expected.compareTo(actual));
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_null() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = null;
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- @Ignore
- public void node_writable_variable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createVariable("x");
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- @Ignore
- public void node_writable_variable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createVariable("really-log-variable-name-asddsfr4545egfdgdfgfdgdtgvdg-dfgfdgdfgdfgdfg4-dfvdfgdfgdfgfdgfdgdfgdfgfdg");
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_uri_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createURI("http://example.org");
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_uri_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createURI("http://user:password@example.org/some/path?key=value#id");
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("simple");
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("language", "en", null);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_03() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("string", XSDDatatype.XSDstring);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_04() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("1234", XSDDatatype.XSDinteger);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_05() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("123.4", XSDDatatype.XSDdecimal);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_06() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("12.3e4", XSDDatatype.XSDdouble);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_literal_07() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createLiteral("true", XSDDatatype.XSDboolean);
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_bnode_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createBlankNode();
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- }
-
- /**
- * Basic node writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void node_writable_bnode_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Node n = NodeFactory.createBlankNode();
- NodeWritable nw = new NodeWritable(n);
- testWriteRead(nw, nw);
- NodeWritable nw2 = new NodeWritable(n);
- testWriteRead(nw2, nw2);
-
- Assert.assertEquals(0, nw.compareTo(nw2));
- }
-
- /**
- * Basic triple writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void triple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Triple t = new Triple(NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
- TripleWritable tw = new TripleWritable(t);
- testWriteRead(tw, tw);
- }
-
- /**
- * Basic triple writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void triple_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Triple t = new Triple(NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
- TripleWritable tw = new TripleWritable(t);
- testWriteRead(tw, tw);
- }
-
- /**
- * Basic quad writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void quad_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"),
- NodeFactory.createLiteral("value"));
- QuadWritable qw = new QuadWritable(q);
- testWriteRead(qw, qw);
- }
-
- /**
- * Basic quad writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void quad_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"),
- NodeFactory.createLiteral("value"));
- QuadWritable qw = new QuadWritable(q);
- testWriteRead(qw, qw);
- }
-
- /**
- * Basic tuple writable round tripping test
- *
- * @throws IOException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws ClassNotFoundException
- */
- @Test
- public void tuple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
- Tuple<Node> t = tuple(NodeFactory.createURI("http://one"), NodeFactory.createURI("http://two"),
- NodeFactory.createLiteral("value"),
- NodeFactory.createLiteral("foo"), NodeFactory.createURI("http://three"));
- NodeTupleWritable tw = new NodeTupleWritable(t);
- testWriteRead(tw, tw);
- }
-}
diff --git a/jena-elephas/jena-elephas-common/src/test/resources/log4j.properties b/jena-elephas/jena-elephas-common/src/test/resources/log4j.properties
deleted file mode 100644
index 5cedb50..0000000
--- a/jena-elephas/jena-elephas-common/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,18 +0,0 @@
-log4j.rootLogger=INFO, stdlog
-
-log4j.appender.stdlog=org.apache.log4j.ConsoleAppender
-## log4j.appender.stdlog.target=System.err
-log4j.appender.stdlog.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdlog.layout.ConversionPattern=%d{HH:mm:ss} %-5p %-25c{1} :: %m%n
-
-# Execution logging
-log4j.logger.org.apache.jena.arq.info=INFO
-log4j.logger.org.apache.jena.arq.exec=INFO
-
-# Everything else in Jena
-log4j.logger.org.apache.jena=WARN
-log4j.logger.org.apache.jena.riot=INFO
-
-# Apache Commons HTTP
-# May be useful to turn up to DEBUG if debugging HTTP communication issues
-log4j.logger.org.apache.http=WARN
diff --git a/jena-elephas/jena-elephas-io/pom.xml b/jena-elephas/jena-elephas-io/pom.xml
deleted file mode 100644
index c1d8e7d..0000000
--- a/jena-elephas/jena-elephas-io/pom.xml
+++ /dev/null
@@ -1,107 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.jena</groupId>
- <artifactId>jena-elephas</artifactId>
- <version>3.18.0-SNAPSHOT</version>
- <relativePath>../</relativePath>
- </parent>
- <artifactId>jena-elephas-io</artifactId>
- <name>Apache Jena - Elephas - I/O</name>
- <description>RDF Input/Output formats library for Hadoop</description>
-
- <properties>
- <automatic.module.name>org.apache.jena.elephas.io</automatic.module.name>
- </properties>
-
- <!-- Note that versions are managed by parent POMs -->
- <dependencies>
- <!-- Internal Project Dependencies -->
-
- <dependency>
- <groupId>org.apache.jena</groupId>
- <artifactId>jena-elephas-common</artifactId>
- <version>3.18.0-SNAPSHOT</version>
- </dependency>
-
- <!-- Hadoop Dependencies -->
- <!-- Note these will be provided on the Hadoop cluster hence
- the provided scope
- -->
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-common</artifactId>
- <scope>provided</scope>
- </dependency>
-
- <!-- Jena dependencies -->
- <dependency>
- <groupId>org.apache.jena</groupId>
- <artifactId>jena-arq</artifactId>
- </dependency>
-
- <!-- Test Dependencies -->
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-javadoc-plugin</artifactId>
- <configuration>
- <version>true</version>
- <show>public</show>
- <quiet>true</quiet>
- <encoding>UTF-8</encoding>
- <windowtitle>Apache Jena Elephas - IO API</windowtitle>
- <doctitle>Apache Jena Elephas - IO API ${project.version}</doctitle>
- <bottom>Licenced under the Apache License, Version 2.0</bottom>
- <links>
- <link>https://jena.apache.org/documentation/javadoc/jena/</link>
- <link>https://jena.apache.org/documentation/javadoc/arq/</link>
- <link>https://jena.apache.org/documentation/javadoc/elephas/common/</link>
- </links>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <!-- versions 2.20, 2.20.1 and 2.22.2 result in test failures -->
- <version>2.19.1</version>
- <configuration>
- <parallel>classes</parallel>
- <threadCount>2</threadCount>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
-</project>
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
deleted file mode 100644
index e2cc847..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io;
-
-/**
- * Hadoop IO related constants
- *
- *
- *
- */
-public class HadoopIOConstants {
-
- /**
- * Private constructor prevents instantiation
- */
- private HadoopIOConstants() {
- }
-
- /**
- * Map Reduce configuration setting for max line length
- */
- public static final String MAX_LINE_LENGTH = "mapreduce.input.linerecordreader.line.maxlength";
-
- /**
- * Run ID
- */
- public static final String RUN_ID = "runId";
-
- /**
- * Compression codecs to use
- */
- public static final String IO_COMPRESSION_CODECS = "io.compression.codecs";
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
deleted file mode 100644
index dbe16ff..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io;
-
-import java.io.IOException;
-
-/**
- * RDF IO related constants
- *
- *
- *
- */
-public class RdfIOConstants {
-
- /**
- * Private constructor prevents instantiation
- */
- private RdfIOConstants() {
- }
-
- /**
- * Configuration key used to set whether bad tuples are ignored. This is the
- * default behaviour, when explicitly set to {@code false} bad tuples will
- * result in {@link IOException} being thrown by the relevant record
- * readers.
- */
- public static final String INPUT_IGNORE_BAD_TUPLES = "rdf.io.input.ignore-bad-tuples";
-
- /**
- * Configuration key used to set the batch size used for RDF output formats
- * that take a batched writing approach. Default value is given by the
- * constant {@link #DEFAULT_OUTPUT_BATCH_SIZE}.
- */
- public static final String OUTPUT_BATCH_SIZE = "rdf.io.output.batch-size";
-
- /**
- * Default batch size for batched output formats
- */
- public static final long DEFAULT_OUTPUT_BATCH_SIZE = 10000;
-
- /**
- * Configuration key used to control behaviour with regards to how blank
- * nodes are handled.
- * <p>
- * The default behaviour is that blank nodes are file scoped which is what
- * the RDF specifications require.
- * </p>
- * <p>
- * However in the case of a multi-stage pipeline this behaviour can cause
- * blank nodes to diverge over several jobs and introduce spurious blank
- * nodes over time. This is described in <a
- * href="https://issues.apache.org/jira/browse/JENA-820">JENA-820</a> and
- * enabling this flag for jobs in your pipeline allow you to work around
- * this problem.
- * </p>
- * <h3>Warning</h3> You should only enable this flag for jobs that take in
- * RDF output originating from previous jobs since our normal blank node
- * allocation policy ensures that blank nodes will be file scoped and unique
- * over all files (barring unfortunate hasing collisions). If you enable
- * this for jobs that take in RDF originating from other sources you may
- * incorrectly conflate blank nodes that are supposed to distinct and
- * separate nodes.
- */
- public static final String GLOBAL_BNODE_IDENTITY = "rdf.io.input.bnodes.global-identity";
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
deleted file mode 100644
index f29564d..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Abstract line based input format that reuses the machinery from
- * {@link NLineInputFormat} to calculate the splits
- *
- *
- *
- * @param <TKey>
- * Key type
- * @param <TValue>
- * Value type
- */
-public abstract class AbstractNLineFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(AbstractNLineFileInputFormat.class);
-
- /**
- * Logically splits the set of input files for the job, splits N lines of
- * the input as one split.
- *
- * @see FileInputFormat#getSplits(JobContext)
- */
- @Override
- public final List<InputSplit> getSplits(JobContext job) throws IOException {
- boolean debug = LOGGER.isDebugEnabled();
- if (debug && FileInputFormat.getInputDirRecursive(job)) {
- LOGGER.debug("Recursive searching for input data is enabled");
- }
-
- List<InputSplit> splits = new ArrayList<InputSplit>();
- int numLinesPerSplit = NLineInputFormat.getNumLinesPerSplit(job);
- for (FileStatus status : listStatus(job)) {
- if (debug) {
- LOGGER.debug("Determining how to split input file/directory {}", status.getPath());
- }
- splits.addAll(NLineInputFormat.getSplitsForFile(status, job.getConfiguration(), numLinesPerSplit));
- }
- return splits;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
deleted file mode 100644
index 01d78d5..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-
-/**
- * Abstract implementation of a while file input format where each file is a
- * single split
- *
- *
- *
- * @param <TKey>
- * Key type
- * @param <TValue>
- * Value type
- */
-public abstract class AbstractWholeFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
-
- @Override
- protected final boolean isSplitable(JobContext context, Path filename) {
- return false;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
deleted file mode 100644
index 255fc8e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.readers.QuadsReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * RDF input format that can handle any RDF quads format that ARQ supports
- * selecting the format to use for each file based upon the file extension
- *
- *
- *
- */
-public class QuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new QuadsReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
deleted file mode 100644
index 4ba5ff1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.readers.TriplesReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * RDF input format that can handle any RDF triples format that ARQ supports
- * selecting the format to use for each file based upon the file extension
- */
-public class TriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new TriplesReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
deleted file mode 100644
index 4ef8656..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.readers.TriplesOrQuadsReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * RDF input format that can handle any RDF triple/quads format that ARQ
- * supports selecting the format to use for each file based upon the file
- * extension. Triples are converted into quads in the default graph.
- *
- *
- *
- */
-public class TriplesOrQuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new TriplesOrQuadsReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDQuadInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDQuadInputFormat.java
deleted file mode 100644
index b4f788c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDQuadInputFormat.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.jsonld;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.jsonld.JsonLDQuadReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-public class JsonLDQuadInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new JsonLDQuadReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDTripleInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDTripleInputFormat.java
deleted file mode 100644
index 0166d01..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/jsonld/JsonLDTripleInputFormat.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.jsonld;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.jsonld.JsonLDTripleReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-public class JsonLDTripleInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new JsonLDTripleReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
deleted file mode 100644
index 296e4c9..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.nquads;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.nquads.BlockedNQuadsReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * NTriples input format where files are processed as blocks of lines rather
- * than in a line based manner as with the {@link NQuadsInputFormat} or as
- * whole files with the {@link WholeFileNQuadsInputFormat}
- * <p>
- * This provides a compromise between the higher parser setup of creating more
- * parsers and the benefit of being able to split input files over multiple
- * mappers.
- * </p>
- *
- *
- *
- */
-public class BlockedNQuadsInputFormat extends AbstractNLineFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new BlockedNQuadsReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/NQuadsInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/NQuadsInputFormat.java
deleted file mode 100644
index cda77e5..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/NQuadsInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.nquads;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.nquads.NQuadsReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * NQuads input format
- *
- *
- *
- */
-public class NQuadsInputFormat extends AbstractNLineFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1) {
- return new NQuadsReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/WholeFileNQuadsInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/WholeFileNQuadsInputFormat.java
deleted file mode 100644
index 086ddba..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/WholeFileNQuadsInputFormat.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.nquads;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.nquads.WholeFileNQuadsReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * NQuads input format where files are processed as complete files rather than
- * in a line based manner as with the {@link NQuadsInputFormat}
- * <p>
- * This has the advantage of less parser setup overhead but the disadvantage
- * that the input cannot be split over multiple mappers.
- * </p>
- *
- *
- *
- */
-public class WholeFileNQuadsInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new WholeFileNQuadsReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/BlockedNTriplesInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/BlockedNTriplesInputFormat.java
deleted file mode 100644
index 81cc6a2..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/BlockedNTriplesInputFormat.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.ntriples;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.ntriples.BlockedNTriplesReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * NTriples input format where files are processed as blocks of lines rather
- * than in a line based manner as with the {@link NTriplesInputFormat} or as
- * whole files with the {@link WholeFileNTriplesInputFormat}
- * <p>
- * This provides a compromise between the higher parser setup of creating more
- * parsers and the benefit of being able to split input files over multiple
- * mappers.
- * </p>
- *
- *
- *
- */
-public class BlockedNTriplesInputFormat extends AbstractNLineFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new BlockedNTriplesReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/NTriplesInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/NTriplesInputFormat.java
deleted file mode 100644
index 5bfa04c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/NTriplesInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.ntriples;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.ntriples.NTriplesReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * NTriples input format
- *
- *
- *
- */
-public class NTriplesInputFormat extends AbstractNLineFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit inputSplit, TaskAttemptContext context) {
- return new NTriplesReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/WholeFileNTriplesInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/WholeFileNTriplesInputFormat.java
deleted file mode 100644
index 0b7db0a..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/ntriples/WholeFileNTriplesInputFormat.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.ntriples;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.ntriples.WholeFileNTriplesReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * NTriples input format where files are processed as complete files rather than
- * in a line based manner as with the {@link NTriplesInputFormat}
- * <p>
- * This has the advantage of less parser setup overhead but the disadvantage
- * that the input cannot be split over multiple mappers.
- * </p>
- *
- *
- *
- */
-public class WholeFileNTriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new WholeFileNTriplesReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfjson/RdfJsonInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfjson/RdfJsonInputFormat.java
deleted file mode 100644
index 66b1833..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfjson/RdfJsonInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.rdfjson;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.rdfjson.RdfJsonReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * RDF/JSON input format
- *
- *
- *
- */
-public class RdfJsonInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new RdfJsonReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfxml/RdfXmlInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfxml/RdfXmlInputFormat.java
deleted file mode 100644
index 0a2b25b..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/rdfxml/RdfXmlInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.rdfxml;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.rdfxml.RdfXmlReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * RDF/XML input format
- *
- *
- *
- */
-public class RdfXmlInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new RdfXmlReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedNodeTupleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedNodeTupleReader.java
deleted file mode 100644
index 345bb44..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedNodeTupleReader.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
-import org.apache.jena.hadoop.rdf.io.input.util.* ;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFParserBuilder ;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.riot.lang.PipedRDFStream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract implementation for a record reader that reads records from blocks
- * of files, this is a hybrid between {@link AbstractLineBasedNodeTupleReader}
- * and {@link AbstractWholeFileNodeTupleReader} in that it can only be used by
- * formats which can be split by lines but reduces the overhead by parsing the
- * split as a whole rather than as individual lines.
- * <p>
- * The keys produced are the approximate position in the file at which a tuple
- * was found and the values will be node tuples. Positions are approximate
- * because they are recorded after the point at which the most recent tuple was
- * parsed from the input thus they reflect the approximate position in the
- * stream immediately after which the triple was found.
- * </p>
- *
- *
- *
- * @param <TValue>
- * Value type
- * @param <T>
- * Tuple type
- */
-public abstract class AbstractBlockBasedNodeTupleReader<TValue, T extends AbstractNodeTupleWritable<TValue>> extends RecordReader<LongWritable, T> {
-
- private static final Logger LOG = LoggerFactory.getLogger(AbstractBlockBasedNodeTupleReader.class);
- private CompressionCodec compressionCodecs;
- private TrackableInputStream input;
- private LongWritable key;
- private long start, length;
- private T tuple;
- private TrackedPipedRDFStream<TValue> stream;
- private PipedRDFIterator<TValue> iter;
- private Thread parserThread;
- private boolean finished = false;
- private boolean ignoreBadTuples = true;
- private boolean parserFinished = false;
- private Throwable parserError = null;
-
- @Override
- public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
- LOG.debug("initialize({}, {})", genericSplit, context);
-
- // Assuming file split
- if (!(genericSplit instanceof FileSplit))
- throw new IOException("This record reader only supports FileSplit inputs");
- FileSplit split = (FileSplit) genericSplit;
-
- // Configuration
- Configuration config = context.getConfiguration();
- this.ignoreBadTuples = config.getBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, true);
- if (this.ignoreBadTuples)
- LOG.warn(
- "Configured to ignore bad tuples, parsing errors will be logged and further parsing aborted but no user visible errors will be thrown. Consider setting {} to false to disable this behaviour",
- RdfIOConstants.INPUT_IGNORE_BAD_TUPLES);
-
- // Figure out what portion of the file to read
- start = split.getStart();
- long end = start + split.getLength();
- final Path file = split.getPath();
- long totalLength = file.getFileSystem(context.getConfiguration()).getFileStatus(file).getLen();
- boolean readToEnd = end == totalLength;
- CompressionCodecFactory factory = new CompressionCodecFactory(config);
- this.compressionCodecs = factory.getCodec(file);
-
- LOG.info(String.format("Got split with start %d and length %d for file with total length of %d", new Object[] { start, split.getLength(), totalLength }));
-
- // Open the file and prepare the input stream
- FileSystem fs = file.getFileSystem(config);
- FSDataInputStream fileIn = fs.open(file);
- this.length = split.getLength();
- if (start > 0)
- fileIn.seek(start);
-
- if (this.compressionCodecs != null) {
- // Compressed input
- // For compressed input NLineInputFormat will have failed to find
- // any line breaks and will give us a split from 0 -> (length - 1)
- // Add 1 and re-verify readToEnd so we can abort correctly if ever
- // given a partial split of a compressed file
- end++;
- readToEnd = end == totalLength;
- if (start > 0 || !readToEnd)
- throw new IOException("This record reader can only be used with compressed input where the split is a whole file");
- input = new TrackedInputStream(this.compressionCodecs.createInputStream(fileIn));
- } else {
- // Uncompressed input
-
- if (readToEnd) {
- input = new TrackedInputStream(fileIn);
- } else {
- // Need to limit the portion of the file we are reading
- input = new BlockInputStream(fileIn, split.getLength());
- }
- }
-
- // Set up background thread for parser
- iter = this.getPipedIterator();
- this.stream = this.getPipedStream(iter, this.input);
- RDFParserBuilder builder = RdfIOUtils.createRDFParserBuilder(context, file);
- Runnable parserRunnable = this.createRunnable(this, this.input, stream, this.getRdfLanguage(), builder);
-
- this.parserThread = new Thread(parserRunnable);
- this.parserThread.setDaemon(true);
- this.parserThread.start();
- }
-
- /**
- * Gets the RDF iterator to use
- *
- * @return Iterator
- */
- protected abstract PipedRDFIterator<TValue> getPipedIterator();
-
- /**
- * Gets the RDF stream to parse to
- *
- * @param iterator
- * Iterator
- * @return RDF stream
- */
- protected abstract TrackedPipedRDFStream<TValue> getPipedStream(PipedRDFIterator<TValue> iterator, TrackableInputStream input);
-
- /**
- * Gets the RDF language to use for parsing
- *
- * @return
- */
- protected abstract Lang getRdfLanguage();
-
- /**
- * Creates the runnable upon which the parsing will run
- *
- * @param input
- * Input
- * @param stream
- * Stream
- * @param lang
- * Language to use for parsing
- * @param builder
- * RDFParser setup
- * @return Parser runnable
- */
- private Runnable createRunnable(final AbstractBlockBasedNodeTupleReader<?, ?> reader, final InputStream input,
- final PipedRDFStream<TValue> stream, final Lang lang, RDFParserBuilder builder) {
- return new Runnable() {
- @Override
- public void run() {
- try {
- builder.lang(lang).source(input).parse(stream);
- reader.setParserFinished(null);
- } catch (Throwable e) {
- reader.setParserFinished(e);
- }
- }
- };
- }
-
- /**
- * Sets the parser thread finished state
- *
- * @param e
- * Error (if any)
- */
- private void setParserFinished(Throwable e) {
- synchronized (this.parserThread) {
- this.parserError = e;
- this.parserFinished = true;
- }
- }
-
- /**
- * Waits for the parser thread to have reported as finished
- *
- * @throws InterruptedException
- */
- private void waitForParserFinished() throws InterruptedException {
- do {
- synchronized (this.parserThread) {
- if (this.parserFinished)
- return;
- }
- Thread.sleep(50);
- } while (true);
- }
-
- /**
- * Creates an instance of a writable tuple from the given tuple value
- *
- * @param tuple
- * Tuple value
- * @return Writable tuple
- */
- protected abstract T createInstance(TValue tuple);
-
- @Override
- public boolean nextKeyValue() throws IOException {
- // Reuse key for efficiency
- if (key == null) {
- key = new LongWritable();
- }
-
- if (this.finished)
- return false;
-
- try {
- if (this.iter.hasNext()) {
- // Position will be relative to the start for the split we're
- // processing
- Long l = this.start + this.stream.getPosition();
- if (l != null) {
- this.key.set(l);
- // For compressed input the actual length from which we
- // calculate progress is likely less than the actual
- // uncompressed length so we need to increment the
- // length as we go along
- // We always add 1 more than the current length because we
- // don't want to report 100% progress until we really have
- // finished
- if (this.compressionCodecs != null && l > this.length)
- this.length = l + 1;
- }
- this.tuple = this.createInstance(this.iter.next());
- return true;
- } else {
- // Need to ensure that the parser thread has finished in order
- // to determine whether we finished without error
- this.waitForParserFinished();
- if (this.parserError != null) {
- LOG.error("Error parsing block, aborting further parsing", this.parserError);
- if (!this.ignoreBadTuples)
- throw new IOException("Error parsing block at position " + (this.start + this.input.getBytesRead()) + ", aborting further parsing",
- this.parserError);
- }
-
- this.key = null;
- this.tuple = null;
- this.finished = true;
- // This is necessary so that when compressed input is used we
- // report 100% progress once we've reached the genuine end of
- // the stream
- if (this.compressionCodecs != null)
- this.length--;
- return false;
- }
- } catch (IOException e) {
- throw e;
- } catch (Throwable e) {
- // Failed to read the tuple on this line
- LOG.error("Error parsing block, aborting further parsing", e);
- if (!this.ignoreBadTuples) {
- this.iter.close();
- throw new IOException("Error parsing block at position " + (this.start + this.input.getBytesRead()) + ", aborting further parsing", e);
- }
- this.key = null;
- this.tuple = null;
- this.finished = true;
- return false;
- }
- }
-
- @Override
- public LongWritable getCurrentKey() {
- return this.key;
- }
-
- @Override
- public T getCurrentValue() {
- return this.tuple;
- }
-
- @Override
- public float getProgress() {
- float progress = 0.0f;
- if (this.key == null) {
- // We've either not started or we've finished
- progress = (this.finished ? 1.0f : 0.0f);
- } else if (this.key.get() == Long.MIN_VALUE) {
- // We don't have a position so we've either in-progress or finished
- progress = (this.finished ? 1.0f : 0.5f);
- } else {
- // We're some way through the file
- progress = (this.key.get() - this.start) / (float) this.length;
- }
- LOG.debug("getProgress() --> {}", progress);
- return progress;
- }
-
- @Override
- public void close() throws IOException {
- this.iter.close();
- this.input.close();
- this.finished = true;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedQuadReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedQuadReader.java
deleted file mode 100644
index adc431f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedQuadReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedQuadsStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An abstract record reader for whole file triple formats
- *
- *
- *
- */
-public abstract class AbstractBlockBasedQuadReader extends AbstractBlockBasedNodeTupleReader<Quad, QuadWritable> {
-
- @Override
- protected PipedRDFIterator<Quad> getPipedIterator() {
- return new PipedRDFIterator<Quad>();
- }
-
- @Override
- protected TrackedPipedRDFStream<Quad> getPipedStream(PipedRDFIterator<Quad> iterator, TrackableInputStream input) {
- return new TrackedPipedQuadsStream(iterator, input);
- }
-
- @Override
- protected QuadWritable createInstance(Quad tuple) {
- return new QuadWritable(tuple);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedTripleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedTripleReader.java
deleted file mode 100644
index 43a171c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractBlockBasedTripleReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedTriplesStream;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-
-/**
- * An abstract record reader for whole file triple formats
- *
- *
- *
- */
-public abstract class AbstractBlockBasedTripleReader extends AbstractBlockBasedNodeTupleReader<Triple, TripleWritable> {
-
- @Override
- protected PipedRDFIterator<Triple> getPipedIterator() {
- return new PipedRDFIterator<Triple>();
- }
-
- @Override
- protected TrackedPipedRDFStream<Triple> getPipedStream(PipedRDFIterator<Triple> iterator, TrackableInputStream input) {
- return new TrackedPipedTriplesStream(iterator, input);
- }
-
- @Override
- protected TripleWritable createInstance(Triple tuple) {
- return new TripleWritable(tuple);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedNodeTupleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedNodeTupleReader.java
deleted file mode 100644
index e0c2fb1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedNodeTupleReader.java
+++ /dev/null
@@ -1,284 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.hadoop.util.LineReader;
-import org.apache.jena.hadoop.rdf.io.HadoopIOConstants;
-import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
-import org.apache.jena.hadoop.rdf.io.input.util.RdfIOUtils;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.lang.LabelToNode;
-import org.apache.jena.riot.system.*;
-import org.apache.jena.riot.tokens.Tokenizer;
-import org.apache.jena.riot.tokens.TokenizerText;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract implementation of a record reader that reads records from line
- * based tuple formats. This only supports reading from file splits currently.
- * <p>
- * The keys produced are the position of the line in the file and the values
- * will be node tuples
- * </p>
- *
- *
- *
- * @param <TValue>
- * @param <T>
- * Writable tuple type
- */
-public abstract class AbstractLineBasedNodeTupleReader<TValue, T extends AbstractNodeTupleWritable<TValue>> extends RecordReader<LongWritable, T> {
-
- private static final Logger LOG = LoggerFactory.getLogger(AbstractLineBasedNodeTupleReader.class);
- private CompressionCodecFactory compressionCodecs = null;
- private long start, pos, end, estLength;
- private int maxLineLength;
- private LineReader in;
- private LongWritable key = null;
- private Text value = null;
- private T tuple = null;
- private ParserProfile maker = null;
- private boolean ignoreBadTuples = true;
-
- @Override
- public final void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
- LOG.debug("initialize({}, {})", genericSplit, context);
-
- // Assuming file split
- if (!(genericSplit instanceof FileSplit))
- throw new IOException("This record reader only supports FileSplit inputs");
- FileSplit split = (FileSplit) genericSplit;
-
- // Intermediate : RDFParser but need to make a Iterator<Quad/Triple>
- LabelToNode labelToNode = RdfIOUtils.createLabelToNode(context, split.getPath());
- maker = new ParserProfileStd(RiotLib.factoryRDF(labelToNode),
- ErrorHandlerFactory.errorHandlerStd,
- IRIResolver.create(), PrefixMapFactory.createForInput(),
- null, true, false);
-
- Configuration config = context.getConfiguration();
- this.ignoreBadTuples = config.getBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, true);
- if (this.ignoreBadTuples)
- LOG.warn(
- "Configured to ignore bad tuples, parsing errors will be logged and the bad line skipped but no errors will be thrownConsider setting {} to false to disable this behaviour",
- RdfIOConstants.INPUT_IGNORE_BAD_TUPLES);
-
- // Figure out what portion of the file to read
- this.maxLineLength = config.getInt(HadoopIOConstants.MAX_LINE_LENGTH, Integer.MAX_VALUE);
- start = split.getStart();
- end = start + split.getLength();
- final Path file = split.getPath();
- long totalLength = file.getFileSystem(context.getConfiguration()).getFileStatus(file).getLen();
- compressionCodecs = new CompressionCodecFactory(config);
- final CompressionCodec codec = compressionCodecs.getCodec(file);
-
- LOG.info(String.format("Got split with start %d and length %d for file with total length of %d", new Object[] { start, split.getLength(), totalLength }));
-
- // Open the file and seek to the start of the split
- FileSystem fs = file.getFileSystem(config);
- FSDataInputStream fileIn = fs.open(file);
- boolean skipFirstLine = false;
- if (codec != null) {
- // Compressed input
- // For compressed input NLineInputFormat will have failed to find
- // any line breaks and will give us a split from 0 -> (length - 1)
- // Add 1 and verify we got complete split
- if (totalLength > split.getLength() + 1)
- throw new IOException("This record reader can only be used with compressed input where the split covers the whole file");
- in = new LineReader(codec.createInputStream(fileIn), config);
- estLength = end;
- end = Long.MAX_VALUE;
- } else {
- // Uncompressed input
- if (start != 0) {
- skipFirstLine = true;
- --start;
- fileIn.seek(start);
- }
- in = new LineReader(fileIn, config);
- }
- // Skip first line and re-establish "start".
- // This is to do with how line reader reads lines and how
- // NLineInputFormat will provide the split information to use
- if (skipFirstLine) {
- start += in.readLine(new Text(), 0, (int) Math.min(Integer.MAX_VALUE, end - start));
- }
- this.pos = start;
- }
-
- /**
- * Gets an iterator over the data on the current line
- *
- * @param line
- * Line
- * @param builder
- * Parser setup.
- * @return Iterator
- */
- protected abstract Iterator<TValue> getIterator(String line, ParserProfile maker);
-
- /** Create a tokenizer for a line
- * @param line
- * Content
- * @return Tokenizer
- */
- protected Tokenizer getTokenizer(String line) {
- return TokenizerText.fromString(line);
- }
-
-
- /**
- * Creates an instance of a writable tuple from the given tuple value
- *
- * @param tuple
- * Tuple value
- * @return Writable tuple
- */
- protected abstract T createInstance(TValue tuple);
-
- @Override
- public final boolean nextKeyValue() throws IOException {
- // Reuse key for efficiency
- if (key == null) {
- key = new LongWritable();
- }
-
- // Reset value which we use for reading lines
- if (value == null) {
- value = new Text();
- }
- tuple = null;
-
- // Try to read the next valid line
- int newSize = 0;
- while (pos < end) {
- // Read next line
- newSize = in.readLine(value, maxLineLength, Math.max((int) Math.min(Integer.MAX_VALUE, end - pos), maxLineLength));
-
- // Once we get an empty line we've reached the end of our input
- if (newSize == 0) {
- break;
- }
-
- // Update position, remember that where inputs are compressed we may
- // be at a larger position then we expected because the length of
- // the split is likely less than the length of the data once
- // decompressed
- key.set(pos);
- pos += newSize;
- if (pos > estLength)
- estLength = pos + 1;
-
- // Skip lines that exceed the line length limit that has been set
- if (newSize >= maxLineLength) {
- LOG.warn("Skipped oversized line of size {} at position {}", newSize, (pos - newSize));
- continue;
- }
-
- // Attempt to read the tuple from current line
- try {
- Iterator<TValue> iter = this.getIterator(value.toString(), maker);
- if (iter.hasNext()) {
- tuple = this.createInstance(iter.next());
-
- // If we reach here we've found a valid tuple so we can
- // break out of the loop
- break;
- } else {
- // Empty line/Comment line
- LOG.debug("Valid line with no triple at position {}", (pos - newSize));
- continue;
- }
- } catch (Throwable e) {
- // Failed to read the tuple on this line
- LOG.error("Bad tuple at position " + (pos - newSize), e);
- if (this.ignoreBadTuples)
- continue;
- throw new IOException(String.format("Bad tuple at position %d", (pos - newSize)), e);
- }
- }
- boolean result = this.tuple != null;
-
- // End of input
- if (newSize == 0) {
- key = null;
- value = null;
- tuple = null;
- result = false;
- estLength = pos;
- }
- LOG.debug("nextKeyValue() --> {}", result);
- return result;
- }
-
- @Override
- public LongWritable getCurrentKey() {
- LOG.debug("getCurrentKey() --> {}", key);
- return key;
- }
-
- @Override
- public T getCurrentValue() {
- LOG.debug("getCurrentValue() --> {}", tuple);
- return tuple;
- }
-
- @Override
- public float getProgress() {
- float progress = 0.0f;
- if (start != end) {
- if (end == Long.MAX_VALUE) {
- if (estLength == 0)
- return 1.0f;
- // Use estimated length
- progress = Math.min(1.0f, (pos - start) / (float) (estLength - start));
- } else {
- // Use actual length
- progress = Math.min(1.0f, (pos - start) / (float) (end - start));
- }
- }
- LOG.debug("getProgress() --> {}", progress);
- return progress;
- }
-
- @Override
- public void close() throws IOException {
- LOG.debug("close()");
- if (in != null) {
- in.close();
- }
- }
-
-}
\ No newline at end of file
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedQuadReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedQuadReader.java
deleted file mode 100644
index f0f616e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedQuadReader.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.util.Iterator;
-
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.system.ParserProfile;
-import org.apache.jena.riot.tokens.Tokenizer;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An abstract reader for line based quad formats
- *
- *
- *
- */
-public abstract class AbstractLineBasedQuadReader extends AbstractLineBasedNodeTupleReader<Quad, QuadWritable> {
-
- @Override
- protected Iterator<Quad> getIterator(String line, ParserProfile maker) {
- Tokenizer tokenizer = getTokenizer(line);
- return getQuadsIterator(tokenizer, maker);
- }
-
- @Override
- protected QuadWritable createInstance(Quad q) {
- return new QuadWritable(q);
- }
-
- protected abstract Iterator<Quad> getQuadsIterator(Tokenizer tokenizer, ParserProfile maker);
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedTripleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedTripleReader.java
deleted file mode 100644
index 2ec8bf6..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractLineBasedTripleReader.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.util.Iterator;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.system.ParserProfile;
-import org.apache.jena.riot.tokens.Tokenizer;
-
-/**
- * An abstract record reader for line based triple formats
- *
- *
- *
- */
-public abstract class AbstractLineBasedTripleReader extends AbstractLineBasedNodeTupleReader<Triple, TripleWritable> {
-
- @Override
- protected Iterator<Triple> getIterator(String line, ParserProfile maker) {
- Tokenizer tokenizer = getTokenizer(line);
- return getTriplesIterator(tokenizer, maker);
- }
-
- @Override
- protected TripleWritable createInstance(Triple t) {
- return new TripleWritable(t);
- }
-
- protected abstract Iterator<Triple> getTriplesIterator(Tokenizer tokenizer, ParserProfile maker);
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractRdfReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractRdfReader.java
deleted file mode 100644
index 030155f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractRdfReader.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract record reader for arbitrary RDF which provides support for
- * selecting the actual record reader to use based on detecting the RDF language
- * from the file name
- *
- * @param <TValue>
- * Tuple type
- * @param <T>
- * Writable tuple type
- */
-public abstract class AbstractRdfReader<TValue, T extends AbstractNodeTupleWritable<TValue>> extends
- RecordReader<LongWritable, T> {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractRdfReader.class);
-
- private RecordReader<LongWritable, T> reader;
-
- @Override
- public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException,
- InterruptedException {
- LOG.debug("initialize({}, {})", genericSplit, context);
-
- // Assuming file split
- if (!(genericSplit instanceof FileSplit))
- throw new IOException("This record reader only supports FileSplit inputs");
-
- // Find RDF language
- FileSplit split = (FileSplit) genericSplit;
- Path path = split.getPath();
- Lang lang = RDFLanguages.filenameToLang(path.getName());
- if (lang == null)
- throw new IOException("There is no registered RDF language for the input file " + path.toString());
-
- // Select the record reader and initialize
- this.reader = this.selectRecordReader(lang);
- this.reader.initialize(split, context);
- }
-
- /**
- * Selects the appropriate record reader to use for the given RDF language
- *
- * @param lang
- * RDF language
- * @return Record reader
- * @throws IOException
- * Should be thrown if no record reader can be selected
- */
- protected abstract RecordReader<LongWritable, T> selectRecordReader(Lang lang) throws IOException;
-
- @Override
- public final boolean nextKeyValue() throws IOException, InterruptedException {
- return this.reader.nextKeyValue();
- }
-
- @Override
- public final LongWritable getCurrentKey() throws IOException, InterruptedException {
- return this.reader.getCurrentKey();
- }
-
- @Override
- public final T getCurrentValue() throws IOException, InterruptedException {
- return this.reader.getCurrentValue();
- }
-
- @Override
- public final float getProgress() throws IOException, InterruptedException {
- return this.reader.getProgress();
- }
-
- @Override
- public final void close() throws IOException {
- this.reader.close();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileNodeTupleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileNodeTupleReader.java
deleted file mode 100644
index 9cbdadc..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileNodeTupleReader.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
-import org.apache.jena.hadoop.rdf.io.input.util.RdfIOUtils;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.Lang ;
-import org.apache.jena.riot.RDFParserBuilder ;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.riot.lang.PipedRDFStream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract implementation for a record reader that reads records from whole
- * files i.e. the whole file must be kept together to allow tuples to be
- * successfully read. This only supports reading from file splits currently.
- * <p>
- * The keys produced are the approximate position in the file at which a tuple
- * was found and the values will be node tuples. Positions are approximate
- * because they are recorded after the point at which the most recent tuple was
- * parsed from the input thus they reflect the approximate position in the
- * stream immediately after which the triple was found.
- * </p>
- * <p>
- * You should also be aware that with whole file formats syntax compressions in
- * the format may mean that there are multiple triples produced with the same
- * position and thus key.
- * </p>
- *
- *
- *
- * @param <TValue>
- * Value type
- * @param <T>
- * Tuple type
- */
-public abstract class AbstractWholeFileNodeTupleReader<TValue, T extends AbstractNodeTupleWritable<TValue>> extends RecordReader<LongWritable, T> {
-
- private static final Logger LOG = LoggerFactory.getLogger(AbstractLineBasedNodeTupleReader.class);
- private CompressionCodec compressionCodecs;
- private TrackedInputStream input;
- private LongWritable key;
- private long length;
- private T tuple;
- private TrackedPipedRDFStream<TValue> stream;
- private PipedRDFIterator<TValue> iter;
- private Thread parserThread;
- private boolean finished = false;
- private boolean ignoreBadTuples = true;
- private boolean parserFinished = false;
- private Throwable parserError = null;
-
- @Override
- public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
- LOG.debug("initialize({}, {})", genericSplit, context);
-
- // Assuming file split
- if (!(genericSplit instanceof FileSplit))
- throw new IOException("This record reader only supports FileSplit inputs");
- FileSplit split = (FileSplit) genericSplit;
-
- // Configuration
- Configuration config = context.getConfiguration();
- this.ignoreBadTuples = config.getBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, true);
- if (this.ignoreBadTuples)
- LOG.warn(
- "Configured to ignore bad tuples, parsing errors will be logged and further parsing aborted but no user visible errors will be thrown. Consider setting {} to false to disable this behaviour",
- RdfIOConstants.INPUT_IGNORE_BAD_TUPLES);
-
- // Figure out what portion of the file to read
- if (split.getStart() > 0)
- throw new IOException("This record reader requires a file split which covers the entire file");
- final Path file = split.getPath();
- long totalLength = file.getFileSystem(context.getConfiguration()).getFileStatus(file).getLen();
- CompressionCodecFactory factory = new CompressionCodecFactory(config);
- this.compressionCodecs = factory.getCodec(file);
-
- LOG.info(String.format("Got split with start %d and length %d for file with total length of %d", new Object[] { split.getStart(), split.getLength(),
- totalLength }));
-
- if (totalLength > split.getLength())
- throw new IOException("This record reader requires a file split which covers the entire file");
-
- // Open the file and prepare the input stream
- FileSystem fs = file.getFileSystem(config);
- FSDataInputStream fileIn = fs.open(file);
- this.length = split.getLength();
- if (this.compressionCodecs != null) {
- // Compressed input
- input = new TrackedInputStream(this.compressionCodecs.createInputStream(fileIn));
- } else {
- // Uncompressed input
- input = new TrackedInputStream(fileIn);
- }
-
- // Set up background thread for parser
- iter = this.getPipedIterator();
- this.stream = this.getPipedStream(iter, this.input);
- RDFParserBuilder builder = RdfIOUtils.createRDFParserBuilder(context, file);
- Runnable parserRunnable = this.createRunnable(this, this.input, stream, this.getRdfLanguage(), builder);
- this.parserThread = new Thread(parserRunnable);
- this.parserThread.setDaemon(true);
- this.parserThread.start();
- }
-
- /**
- * Gets the RDF iterator to use
- *
- * @return Iterator
- */
- protected abstract PipedRDFIterator<TValue> getPipedIterator();
-
- /**
- * Gets the RDF stream to parse to
- *
- * @param iterator
- * Iterator
- * @return RDF stream
- */
- protected abstract TrackedPipedRDFStream<TValue> getPipedStream(PipedRDFIterator<TValue> iterator, TrackableInputStream input);
-
- /**
- * Gets the RDF language to use for parsing
- *
- * @return
- */
- protected abstract Lang getRdfLanguage();
-
- /**
- * Creates the runnable upon which the parsing will run
- *
- * @param input
- * Input
- * @param stream
- * Stream
- * @param lang
- * Language to use for parsing
- * @param builder
- * RDFParser setup
- * @return Parser runnable
- */
- private Runnable createRunnable(final AbstractWholeFileNodeTupleReader<?, ?> reader, final InputStream input,
- final PipedRDFStream<TValue> stream, final Lang lang, RDFParserBuilder builder) {
- return new Runnable() {
- @Override
- public void run() {
- try {
- builder.lang(lang).source(input).parse(stream);
- reader.setParserFinished(null);
- } catch (Throwable e) {
- reader.setParserFinished(e);
- }
- }
- };
- }
-
- /**
- * Sets the parser thread finished state
- *
- * @param e
- * Error (if any)
- */
- private void setParserFinished(Throwable e) {
- synchronized (this.parserThread) {
- this.parserError = e;
- this.parserFinished = true;
- }
- }
-
- /**
- * Waits for the parser thread to have reported as finished
- *
- * @throws InterruptedException
- */
- private void waitForParserFinished() throws InterruptedException {
- do {
- synchronized (this.parserThread) {
- if (this.parserFinished)
- return;
- }
- Thread.sleep(50);
- } while (true);
- }
-
- /**
- * Creates an instance of a writable tuple from the given tuple value
- *
- * @param tuple
- * Tuple value
- * @return Writable tuple
- */
- protected abstract T createInstance(TValue tuple);
-
- @Override
- public boolean nextKeyValue() throws IOException {
- // Reuse key for efficiency
- if (key == null) {
- key = new LongWritable();
- }
-
- if (this.finished)
- return false;
-
- try {
- if (this.iter.hasNext()) {
- Long l = this.stream.getPosition();
- if (l != null) {
- this.key.set(l);
- // For compressed input the actual length from which we
- // calculate progress is likely less than the actual
- // uncompressed length so we may need to increment the
- // length as we go along
- // We always add 1 more than the current length because we
- // don't want to report 100% progress until we really have
- // finished
- if (this.compressionCodecs != null && l > this.length)
- this.length = l + 1;
- }
- this.tuple = this.createInstance(this.iter.next());
- return true;
- } else {
- // Need to ensure that the parser thread has finished in order
- // to determine whether we finished without error
- this.waitForParserFinished();
- if (this.parserError != null) {
- LOG.error("Error parsing whole file, aborting further parsing", this.parserError);
- if (!this.ignoreBadTuples)
- throw new IOException("Error parsing whole file at position " + this.input.getBytesRead() + ", aborting further parsing",
- this.parserError);
-
- }
-
- this.key = null;
- this.tuple = null;
- this.finished = true;
- // This is necessary so that when compressed input is used we
- // report 100% progress once we've reached the genuine end of
- // the stream
- if (this.compressionCodecs != null)
- this.length--;
- return false;
- }
- } catch (Throwable e) {
- // Failed to read the tuple on this line
- LOG.error("Error parsing whole file, aborting further parsing", e);
- if (!this.ignoreBadTuples) {
- this.iter.close();
- throw new IOException("Error parsing whole file at position " + this.input.getBytesRead() + ", aborting further parsing", e);
- }
- this.key = null;
- this.tuple = null;
- this.finished = true;
- return false;
- }
- }
-
- @Override
- public LongWritable getCurrentKey() {
- return this.key;
- }
-
- @Override
- public T getCurrentValue() {
- return this.tuple;
- }
-
- @Override
- public float getProgress() {
- float progress = 0.0f;
- if (this.key == null) {
- // We've either not started or we've finished
- progress = (this.finished ? 1.0f : 0.0f);
- } else if (this.key.get() == Long.MIN_VALUE) {
- // We don't have a position so we've either in-progress or finished
- progress = (this.finished ? 1.0f : 0.5f);
- } else {
- // We're some way through the file
- progress = this.key.get() / (float) this.length;
- }
- LOG.debug("getProgress() --> {}", progress);
- return progress;
- }
-
- @Override
- public void close() throws IOException {
- this.iter.close();
- this.input.close();
- this.finished = true;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileQuadReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileQuadReader.java
deleted file mode 100644
index 8097d52..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileQuadReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedQuadsStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An abstract record reader for whole file triple formats
- *
- *
- *
- */
-public abstract class AbstractWholeFileQuadReader extends AbstractWholeFileNodeTupleReader<Quad, QuadWritable> {
-
- @Override
- protected PipedRDFIterator<Quad> getPipedIterator() {
- return new PipedRDFIterator<Quad>();
- }
-
- @Override
- protected TrackedPipedRDFStream<Quad> getPipedStream(PipedRDFIterator<Quad> iterator, TrackableInputStream input) {
- return new TrackedPipedQuadsStream(iterator, input);
- }
-
- @Override
- protected QuadWritable createInstance(Quad tuple) {
- return new QuadWritable(tuple);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileTripleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileTripleReader.java
deleted file mode 100644
index 1f56b07..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/AbstractWholeFileTripleReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
-import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedTriplesStream;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-
-/**
- * An abstract record reader for whole file triple formats
- *
- *
- *
- */
-public abstract class AbstractWholeFileTripleReader extends AbstractWholeFileNodeTupleReader<Triple, TripleWritable> {
-
- @Override
- protected PipedRDFIterator<Triple> getPipedIterator() {
- return new PipedRDFIterator<Triple>();
- }
-
- @Override
- protected TrackedPipedRDFStream<Triple> getPipedStream(PipedRDFIterator<Triple> iterator, TrackableInputStream input) {
- return new TrackedPipedTriplesStream(iterator, input);
- }
-
- @Override
- protected TripleWritable createInstance(Triple tuple) {
- return new TripleWritable(tuple);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/QuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/QuadsReader.java
deleted file mode 100644
index ecd930a..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/QuadsReader.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A record reader that reads triples from any RDF quads format
- */
-public class QuadsReader extends AbstractRdfReader<Quad, QuadWritable> {
-
- @Override
- protected RecordReader<LongWritable, QuadWritable> selectRecordReader(Lang lang) throws IOException {
- if (!RDFLanguages.isQuads(lang))
- throw new IOException(
- lang.getLabel()
- + " is not a RDF quads format, perhaps you wanted TriplesInputFormat or TriplesOrQuadsInputFormat instead?");
-
- // This will throw an appropriate error if the language does not support
- // triples
- return HadoopRdfIORegistry.createQuadReader(lang);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesOrQuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesOrQuadsReader.java
deleted file mode 100644
index c559b96..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesOrQuadsReader.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A record reader that reads RDF from any triples/quads format. Triples are
- * converted into quads in the default graph. This behaviour can be changed by
- * deriving from this class and overriding the {@link #getGraphNode()} method
- *
- *
- *
- */
-public class TriplesOrQuadsReader extends AbstractRdfReader<Quad, QuadWritable> {
-
- @Override
- protected RecordReader<LongWritable, QuadWritable> selectRecordReader(Lang lang) throws IOException {
- if (!RDFLanguages.isQuads(lang) && !RDFLanguages.isTriples(lang))
- throw new IOException(lang.getLabel() + " is not a RDF triples/quads format");
-
- if (HadoopRdfIORegistry.hasQuadReader(lang)) {
- // Supports quads directly
- return HadoopRdfIORegistry.createQuadReader(lang);
- } else {
- // Try to create a triples reader and wrap upwards into quads
- // This will throw an error if a triple reader is not available
- return new TriplesToQuadsReader(HadoopRdfIORegistry.createTripleReader(lang));
- }
- }
-
- /**
- * Gets the graph node which represents the graph into which triples will be
- * indicated to belong to when they are converting into quads.
- * <p>
- * Defaults to {@link Quad#defaultGraphNodeGenerated} which represents the
- * default graph
- * </p>
- *
- * @return Graph node
- */
- protected Node getGraphNode() {
- return Quad.defaultGraphNodeGenerated;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesReader.java
deleted file mode 100644
index 0467b5c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesReader.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-/**
- * A record reader that reads triples from any RDF triples format
- */
-public class TriplesReader extends AbstractRdfReader<Triple, TripleWritable> {
-
- @Override
- protected RecordReader<LongWritable, TripleWritable> selectRecordReader(Lang lang) throws IOException {
- if (!RDFLanguages.isTriples(lang))
- throw new IOException(
- lang.getLabel()
- + " is not a RDF triples format, perhaps you wanted QuadsInputFormat or TriplesOrQuadsInputFormat instead?");
-
- // This will throw an appropriate error if the language does not support
- // triples
- return HadoopRdfIORegistry.createTripleReader(lang);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesToQuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesToQuadsReader.java
deleted file mode 100644
index e4de126..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/TriplesToQuadsReader.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A record reader that converts triples into quads by wrapping a
- * {@code RecordReader<LongWritable, TripleWritable>} implementation
- *
- *
- *
- */
-public class TriplesToQuadsReader extends RecordReader<LongWritable, QuadWritable> {
-
- private final RecordReader<LongWritable, TripleWritable> reader;
- private Node graph;
-
- /**
- * Creates a new reader
- *
- * @param reader
- * Triple reader
- */
- public TriplesToQuadsReader(RecordReader<LongWritable, TripleWritable> reader) {
- this(reader, Quad.defaultGraphNodeGenerated);
- }
-
- /**
- * Creates a new reader
- *
- * @param reader
- * Triple reader
- * @param graphNode
- * Graph node
- */
- public TriplesToQuadsReader(RecordReader<LongWritable, TripleWritable> reader, Node graphNode) {
- if (reader == null)
- throw new NullPointerException("reader cannot be null");
- if (graphNode == null)
- throw new NullPointerException("Graph node cannot be null");
- this.reader = reader;
- this.graph = graphNode;
- }
-
- @Override
- public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
- this.reader.initialize(split, context);
- }
-
- @Override
- public final boolean nextKeyValue() throws IOException, InterruptedException {
- return this.reader.nextKeyValue();
- }
-
- @Override
- public final LongWritable getCurrentKey() throws IOException, InterruptedException {
- return this.reader.getCurrentKey();
- }
-
- @Override
- public final QuadWritable getCurrentValue() throws IOException, InterruptedException {
- TripleWritable t = this.reader.getCurrentValue();
- return new QuadWritable(new Quad(this.graph, t.get()));
- }
-
- @Override
- public final float getProgress() throws IOException, InterruptedException {
- return this.reader.getProgress();
- }
-
- @Override
- public final void close() throws IOException {
- this.reader.close();
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDQuadReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDQuadReader.java
deleted file mode 100644
index 1b3f467..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDQuadReader.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.jsonld;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileQuadReader;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class JsonLDQuadReader extends AbstractWholeFileQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.JSONLD;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDTripleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDTripleReader.java
deleted file mode 100644
index 7cdea9e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/jsonld/JsonLDTripleReader.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.jsonld;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class JsonLDTripleReader extends AbstractWholeFileTripleReader {
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.JSONLD;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/BlockedNQuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/BlockedNQuadsReader.java
deleted file mode 100644
index eb08152..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/BlockedNQuadsReader.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.nquads;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractBlockBasedQuadReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record for NQuads
- * <p>
- * This is a hybrid of the {@link NQuadsReader} and the
- * {@link WholeFileNQuadsReader} in that it does not process individual lines
- * rather it processes the inputs in blocks of lines parsing the whole block
- * rather than individual lines. This provides a compromise between the higher
- * parser setup of creating more parsers and the benefit of being able to split
- * input files over multiple mappers.
- * </p>
- *
- *
- *
- */
-public class BlockedNQuadsReader extends AbstractBlockBasedQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.NQUADS;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/NQuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/NQuadsReader.java
deleted file mode 100644
index a6ac232..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/NQuadsReader.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.nquads;
-
-import java.util.Iterator;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractLineBasedQuadReader;
-import org.apache.jena.riot.lang.RiotParsers;
-import org.apache.jena.riot.system.ParserProfile;
-import org.apache.jena.riot.tokens.Tokenizer;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A record reader for NQuads
- *
- *
- *
- */
-public class NQuadsReader extends AbstractLineBasedQuadReader {
- @Override
- protected Iterator<Quad> getQuadsIterator(Tokenizer tokenizer, ParserProfile maker) {
- return RiotParsers.createParserNQuads(tokenizer, null, maker);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/WholeFileNQuadsReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/WholeFileNQuadsReader.java
deleted file mode 100644
index d5f36a3..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/nquads/WholeFileNQuadsReader.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.nquads;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileQuadReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record for NQuads
- * <p>
- * Unlike the {@link NQuadsReader} this processes files as a whole rather than
- * individual lines. This has the advantage of less parser setup overhead but
- * the disadvantage that the input cannot be split between multiple mappers.
- * </p>
- *
- *
- *
- */
-public class WholeFileNQuadsReader extends AbstractWholeFileQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.NQUADS;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/BlockedNTriplesReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/BlockedNTriplesReader.java
deleted file mode 100644
index aa56984..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/BlockedNTriplesReader.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.ntriples;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractBlockBasedTripleReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record for NTriples
- * <p>
- * This is a hybrid of the {@link NTriplesReader} and the
- * {@link WholeFileNTriplesReader} in that it does not process individual lines
- * rather it processes the inputs in blocks of lines parsing the whole block
- * rather than individual lines. This provides a compromise between the higher
- * parser setup of creating more parsers and the benefit of being able to split
- * input files over multiple mappers.
- * </p>
- *
- *
- *
- */
-public class BlockedNTriplesReader extends AbstractBlockBasedTripleReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.NTRIPLES;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/NTriplesReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/NTriplesReader.java
deleted file mode 100644
index 2e20af4..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/NTriplesReader.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.ntriples;
-
-import java.util.Iterator;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractLineBasedTripleReader;
-import org.apache.jena.riot.lang.RiotParsers;
-import org.apache.jena.riot.system.ParserProfile;
-import org.apache.jena.riot.tokens.Tokenizer;
-
-/**
- * A record reader for NTriples
- *
- *
- *
- */
-public class NTriplesReader extends AbstractLineBasedTripleReader {
-
- @Override
- protected Iterator<Triple> getTriplesIterator(Tokenizer tokenizer, ParserProfile maker) {
- return RiotParsers.createParserNTriples(tokenizer, null, maker);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/WholeFileNTriplesReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/WholeFileNTriplesReader.java
deleted file mode 100644
index c6d1be3..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/ntriples/WholeFileNTriplesReader.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.ntriples;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record for NTriples
- * <p>
- * Unlike the {@link NTriplesReader} this processes files as a whole rather than
- * individual lines. This has the advantage of less parser setup overhead but
- * the disadvantage that the input cannot be split between multiple mappers.
- * </p>
- *
- *
- *
- */
-public class WholeFileNTriplesReader extends AbstractWholeFileTripleReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.NTRIPLES;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfjson/RdfJsonReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfjson/RdfJsonReader.java
deleted file mode 100644
index 6cab094..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfjson/RdfJsonReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.rdfjson;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record reader for RDF/JSON files
- *
- *
- *
- */
-public class RdfJsonReader extends AbstractWholeFileTripleReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.RDFJSON;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfxml/RdfXmlReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfxml/RdfXmlReader.java
deleted file mode 100644
index b5e943f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/rdfxml/RdfXmlReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.rdfxml;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record reader for RDF/XML files
- *
- *
- *
- */
-public class RdfXmlReader extends AbstractWholeFileTripleReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.RDFXML;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftQuadReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftQuadReader.java
deleted file mode 100644
index 084b1ec..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftQuadReader.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.thrift;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileQuadReader;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class ThriftQuadReader extends AbstractWholeFileQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.THRIFT;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftTripleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftTripleReader.java
deleted file mode 100644
index 713bfa7..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/thrift/ThriftTripleReader.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.thrift;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class ThriftTripleReader extends AbstractWholeFileTripleReader {
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.THRIFT;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trig/TriGReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trig/TriGReader.java
deleted file mode 100644
index 237c9c1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trig/TriGReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.trig;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileQuadReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record reader for TriG files
- *
- *
- *
- */
-public class TriGReader extends AbstractWholeFileQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.TRIG;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trix/TriXReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trix/TriXReader.java
deleted file mode 100644
index 5087370..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/trix/TriXReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.trix;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileQuadReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record reader for TriX files
- *
- *
- *
- */
-public class TriXReader extends AbstractWholeFileQuadReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.TRIX;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/turtle/TurtleReader.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/turtle/TurtleReader.java
deleted file mode 100644
index b0417f6..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/readers/turtle/TurtleReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.readers.turtle;
-
-import org.apache.jena.hadoop.rdf.io.input.readers.AbstractWholeFileTripleReader;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record reader for Turtle files
- *
- *
- *
- */
-public class TurtleReader extends AbstractWholeFileTripleReader {
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.TURTLE;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftQuadInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftQuadInputFormat.java
deleted file mode 100644
index 59e6d70..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftQuadInputFormat.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.thrift;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.thrift.ThriftQuadReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-public class ThriftQuadInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new ThriftQuadReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftTripleInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftTripleInputFormat.java
deleted file mode 100644
index e135441..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/thrift/ThriftTripleInputFormat.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.thrift;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.thrift.ThriftTripleReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-public class ThriftTripleInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new ThriftTripleReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trig/TriGInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trig/TriGInputFormat.java
deleted file mode 100644
index e4b4c40..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trig/TriGInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.trig;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.trig.TriGReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-
-/**
- * Input format for TriG
- *
- *
- *
- */
-public class TriGInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new TriGReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trix/TriXInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trix/TriXInputFormat.java
deleted file mode 100644
index 9f11fe9..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/trix/TriXInputFormat.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.trix;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.trix.TriXReader;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-
-/**
- * Input format for TriX
- */
-public class TriXInputFormat extends AbstractWholeFileInputFormat<LongWritable, QuadWritable> {
-
- @Override
- public RecordReader<LongWritable, QuadWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new TriXReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/turtle/TurtleInputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/turtle/TurtleInputFormat.java
deleted file mode 100644
index 983b21e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/turtle/TurtleInputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.turtle;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.io.input.AbstractWholeFileInputFormat;
-import org.apache.jena.hadoop.rdf.io.input.readers.turtle.TurtleReader;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-
-/**
- * Turtle input format
- *
- *
- *
- */
-public class TurtleInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
-
- @Override
- public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
- return new TurtleReader();
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStream.java
deleted file mode 100644
index 9dd4ccd..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStream.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * A block input stream which can is a wrapper around another input stream which
- * restricts reading to a specific number of bytes and can report the number of
- * bytes read
- * <p>
- * The class assumes that the underlying input stream has already been seeked to
- * the appropriate start point
- * </p>
- *
- *
- *
- */
-public final class BlockInputStream extends TrackedInputStream {
-
- private long limit = Long.MAX_VALUE;
-
- /**
- * Creates a new tracked input stream
- *
- * @param input
- * Input stream to track
- * @param limit
- * Maximum number of bytes to read from the stream
- */
- public BlockInputStream(InputStream input, long limit) {
- super(input);
- if (limit < 0)
- throw new IllegalArgumentException("limit must be >= 0");
- this.limit = limit;
- }
-
- @Override
- public int read() throws IOException {
- if (this.bytesRead >= this.limit) {
- return -1;
- }
- return super.read();
- }
-
- @Override
- public int available() throws IOException {
- if (this.bytesRead >= this.limit) {
- return 0;
- }
- return super.available();
- }
-
- @Override
- public int read(byte[] b, int off, int len) throws IOException {
- if (len == 0) {
- return 0;
- } else if (this.bytesRead >= this.limit) {
- return -1;
- } else if (len > this.limit - this.bytesRead) {
- len = (int) (this.limit - this.bytesRead);
- }
- return super.read(b, off, len);
- }
-
- @Override
- public long skip(long n) throws IOException {
- if (n == 0) {
- return 0;
- } else if (this.bytesRead >= this.limit) {
- return -1;
- } else if (n > this.limit - this.bytesRead) {
- n = this.limit - this.bytesRead;
- }
- return super.skip(n);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/RdfIOUtils.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/RdfIOUtils.java
deleted file mode 100644
index ff34119..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/RdfIOUtils.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import java.util.UUID;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
-import org.apache.jena.riot.RDFParser ;
-import org.apache.jena.riot.RDFParserBuilder ;
-import org.apache.jena.riot.lang.LabelToNode;
-import org.apache.jena.riot.system.* ;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * RDF IO utility functions
- *
- *
- *
- */
-public class RdfIOUtils {
- private static final Logger LOGGER = LoggerFactory.getLogger(RdfIOUtils.class);
-
- /**
- * Private constructor prevents instantiation
- */
- private RdfIOUtils() {
- }
-
- /**
- * Creates a parser profile for the given job context
- *
- * @param context
- * Context
- * @param path
- * File path
- * @return Parser profile
- * @deprecated Legacy - use {@link #createRDFParserBuilder}.
- */
- @Deprecated
- public static ParserProfile createParserProfile(JobContext context, Path path) {
- LabelToNode labelMapping = createLabelToNode(context, path);
- ParserProfile profile = RiotLib.createParserProfile(RiotLib.factoryRDF(labelMapping), ErrorHandlerFactory.errorHandlerStd,
- IRIResolver.createNoResolve(), false);
- return profile;
- }
-
- public static RDFParserBuilder createRDFParserBuilder(JobContext context, Path path) {
- LabelToNode labelMapping = createLabelToNode(context, path);
- RDFParserBuilder builder = RDFParser.create()
- .labelToNode(labelMapping)
- .errorHandler(ErrorHandlerFactory.errorHandlerStd) ;
- return builder ;
- }
-
- public static LabelToNode createLabelToNode(JobContext context, Path path) {
- UUID seed = RdfIOUtils.getSeed(context, path);
- LabelToNode labelMapping = LabelToNode.createScopeByDocumentHash(seed);
- return labelMapping;
- }
- /**
- * Selects a seed for use in generating blank node identifiers
- *
- * @param context
- * Job Context
- * @param path
- * File path
- * @return Seed
- */
- public static UUID getSeed(JobContext context, Path path) {
- // This is to ensure that blank node allocation policy is constant when
- // subsequent MapReduce jobs need that
- String jobId = context.getJobID().toString();
- if (jobId == null) {
- jobId = String.valueOf(System.currentTimeMillis());
- LOGGER.warn(
- "Job ID was not set, using current milliseconds of {}. Sequence of MapReduce jobs must carefully handle blank nodes.",
- jobId);
- }
-
- if (!context.getConfiguration().getBoolean(RdfIOConstants.GLOBAL_BNODE_IDENTITY, false)) {
- // Using normal file scoped blank node allocation
- LOGGER.debug("Generating Blank Node Seed from Job Details (ID={}, Input Path={})", jobId, path);
-
- // Form a reproducible seed for the run
- return new UUID(jobId.hashCode(), path.hashCode());
- } else {
- // Using globally scoped blank node allocation
- LOGGER.warn(
- "Using globally scoped blank node allocation policy from Job Details (ID={}) - this is unsafe if your RDF inputs did not originate from a previous job",
- jobId);
-
- return new UUID(jobId.hashCode(), 0);
- }
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackableInputStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackableInputStream.java
deleted file mode 100644
index cc0455c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackableInputStream.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import java.io.InputStream;
-
-/**
- * An input stream that tracks the number of bytes read
- *
- *
- *
- */
-public abstract class TrackableInputStream extends InputStream {
-
- /**
- * Gets the number of bytes read
- *
- * @return Number of bytes read
- */
- public abstract long getBytesRead();
-
-}
\ No newline at end of file
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStream.java
deleted file mode 100644
index 73b7aa1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStream.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * A tracked input stream which can is a wrapper around another input stream and
- * can report the number of bytes read
- *
- *
- *
- */
-public class TrackedInputStream extends TrackableInputStream {
-
- protected InputStream input;
- protected long bytesRead = 0, lastMark;
-
- /**
- * Creates a new tracked input stream
- *
- * @param input
- * Input stream to track
- */
- public TrackedInputStream(InputStream input) {
- if (input == null)
- throw new NullPointerException("Input cannot be null");
- this.input = input;
- }
-
- @Override
- public int read() throws IOException {
- int read = this.input.read();
- if (read >= 0)
- this.bytesRead++;
- return read;
- }
-
- @Override
- public long getBytesRead() {
- return this.bytesRead;
- }
-
- @Override
- public void close() throws IOException {
- this.input.close();
- }
-
- @Override
- public int available() throws IOException {
- return this.input.available();
- }
-
- @Override
- public synchronized void mark(int readlimit) {
- this.input.mark(readlimit);
- this.lastMark = this.bytesRead;
- }
-
- @Override
- public boolean markSupported() {
- return this.input.markSupported();
- }
-
- @Override
- public int read(byte[] b, int off, int len) throws IOException {
- if (len == 0) return 0;
- int read = this.input.read(b, off, len);
- if (read > 0)
- this.bytesRead += read;
- return read;
- }
-
- @Override
- public int read(byte[] b) throws IOException {
- return this.read(b, 0, b.length);
- }
-
- @Override
- public synchronized void reset() throws IOException {
- this.input.reset();
- this.bytesRead = this.lastMark;
- }
-
- @Override
- public long skip(long n) throws IOException {
- if (n == 0)
- return 0;
- long skipped = 0;
- byte[] buffer = new byte[16];
- int readSize = Math.min(buffer.length, n > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) n);
- int read;
- do {
- if (n - skipped > readSize) {
- read = this.input.read(buffer, 0, readSize);
- } else {
- read = this.input.read(buffer, 0, (int) (n - skipped));
- }
- if (read > 0) {
- this.bytesRead += read;
- skipped += read;
- }
- } while (skipped < n && read >= 0);
-
- return skipped;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedQuadsStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedQuadsStream.java
deleted file mode 100644
index e78d884..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedQuadsStream.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A tracked piped quads stream
- *
- *
- *
- */
-public class TrackedPipedQuadsStream extends TrackedPipedRDFStream<Quad> {
-
- /**
- * Creates a new stream
- *
- * @param sink
- * Sink
- * @param input
- * Input stream
- */
- public TrackedPipedQuadsStream(PipedRDFIterator<Quad> sink, TrackableInputStream input) {
- super(sink, input);
- }
-
- @Override
- public void triple(Triple triple) {
- // Triples are discarded
- }
-
- @Override
- public void quad(Quad quad) {
- this.receive(quad);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedRDFStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedRDFStream.java
deleted file mode 100644
index 4af12e6..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedRDFStream.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import java.util.LinkedList;
-import java.util.Queue;
-
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.riot.lang.PipedRDFStream;
-
-/**
- * A tracked piped RDF stream
- *
- *
- *
- * @param <T>
- * Type corresponding to a supported RDF primitive
- */
-public abstract class TrackedPipedRDFStream<T> extends PipedRDFStream<T> {
-
- private TrackableInputStream input;
- private Queue<Long> positions = new LinkedList<Long>();
-
- protected TrackedPipedRDFStream(PipedRDFIterator<T> sink, TrackableInputStream input) {
- super(sink);
- this.input = input;
- }
-
- @Override
- protected void receive(T t) {
- // Track positions the input stream is at as we receive inputs
- synchronized (this.positions) {
- this.positions.add(this.input.getBytesRead());
- }
- super.receive(t);
- }
-
- /**
- * Gets the next position
- *
- * @return Position
- */
- public Long getPosition() {
- synchronized (this.positions) {
- return this.positions.poll();
- }
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedTriplesStream.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedTriplesStream.java
deleted file mode 100644
index d37cee1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedPipedTriplesStream.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.input.util;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.riot.lang.PipedRDFIterator;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A tracked piped triples stream
- *
- *
- *
- */
-public class TrackedPipedTriplesStream extends TrackedPipedRDFStream<Triple> {
-
- /**
- * Creates a tracked triples stream
- *
- * @param sink
- * Sink
- * @param input
- * Input stream
- */
- public TrackedPipedTriplesStream(PipedRDFIterator<Triple> sink, TrackableInputStream input) {
- super(sink, input);
- }
-
- @Override
- public void triple(Triple triple) {
- receive(triple);
- }
-
- @Override
- public void quad(Quad quad) {
- // Quads are discarded
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractBatchedNodeTupleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractBatchedNodeTupleOutputFormat.java
deleted file mode 100644
index 175d9ff..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractBatchedNodeTupleOutputFormat.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractBatchedNodeTupleWriter;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-
-
-/**
- * Abstract output format for formats that use a
- * {@link AbstractBatchedNodeTupleWriter} as their writer
- *
- *
- *
- * @param <TKey>
- * Key type
- * @param <TTuple>
- * Tuple type
- * @param <TValue>
- * Writable tuple type i.e. the value type
- */
-public abstract class AbstractBatchedNodeTupleOutputFormat<TKey, TTuple, TValue extends AbstractNodeTupleWritable<TTuple>> extends
- AbstractNodeTupleOutputFormat<TKey, TTuple, TValue> {
-
- @Override
- protected RecordWriter<TKey, TValue> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- long batchSize = config.getLong(RdfIOConstants.OUTPUT_BATCH_SIZE, RdfIOConstants.DEFAULT_OUTPUT_BATCH_SIZE);
- return this.getRecordWriter(writer, batchSize);
- }
-
- protected abstract RecordWriter<TKey, TValue> getRecordWriter(Writer writer, long batchSize);
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeOutputFormat.java
deleted file mode 100644
index 64b0d1b..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeOutputFormat.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.jena.hadoop.rdf.types.NodeWritable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Abstract output format which takes pairs with Node keys and arbitrary values
- * and writes them as a simple line based text file
- *
- *
- *
- * @param <TValue> Value type
- */
-public abstract class AbstractNodeOutputFormat<TValue> extends FileOutputFormat<NodeWritable, TValue> {
-
- private static final Logger LOG = LoggerFactory.getLogger(AbstractNodeOutputFormat.class);
-
- @Override
- public RecordWriter<NodeWritable, TValue> getRecordWriter(TaskAttemptContext context) throws IOException {
- Configuration config = context.getConfiguration();
- boolean isCompressed = getCompressOutput(context);
- CompressionCodec codec = null;
- String extension = this.getFileExtension();
- if (isCompressed) {
- Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(context, GzipCodec.class);
- codec = ReflectionUtils.newInstance(codecClass, config);
- extension += codec.getDefaultExtension();
- }
- Path file = getDefaultWorkFile(context, extension);
- LOG.info("Writing output to file " + file);
- FileSystem fs = file.getFileSystem(config);
- if (!isCompressed) {
- FSDataOutputStream fileOut = fs.create(file, false);
- return this.getRecordWriter(new OutputStreamWriter(fileOut), config);
- } else {
- FSDataOutputStream fileOut = fs.create(file, false);
- return this.getRecordWriter(new OutputStreamWriter(codec.createOutputStream(fileOut)), config);
- }
- }
-
- /**
- * Gets the file extension to use for output
- *
- * @return File extension including the '.'
- */
- protected String getFileExtension() {
- return ".nodes";
- }
-
- /**
- * Gets the record writer to use
- *
- * @param writer
- * Writer to write output to
- * @param config
- * Configuration
- * @return Record writer
- */
- protected abstract RecordWriter<NodeWritable, TValue> getRecordWriter(Writer writer, Configuration config);
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormat.java
deleted file mode 100644
index b98d652..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormat.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract implementation of an output format for line based tuple formats
- * where the key is ignored and only the tuple values will be output
- *
- *
- * @param <TKey>
- * Key type
- * @param <TValue>
- * Tuple value type
- * @param <T>
- * Writable node tuple type
- *
- */
-public abstract class AbstractNodeTupleOutputFormat<TKey, TValue, T extends AbstractNodeTupleWritable<TValue>> extends
- FileOutputFormat<TKey, T> {
-
- private static final Logger LOG = LoggerFactory.getLogger(AbstractNodeTupleOutputFormat.class);
-
- @Override
- public RecordWriter<TKey, T> getRecordWriter(TaskAttemptContext context) throws IOException {
- Configuration config = context.getConfiguration();
- boolean isCompressed = getCompressOutput(context);
- CompressionCodec codec = null;
-
- // Build the output file path
- String extension = this.getFileExtension();
- if (isCompressed) {
- // Add compression extension if applicable
- Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(context, GzipCodec.class);
- codec = ReflectionUtils.newInstance(codecClass, config);
- extension += codec.getDefaultExtension();
- }
- Path file = getDefaultWorkFile(context, extension);
- LOG.info("Writing output to file " + file);
-
- // Open the file appropriately and create a record writer for it
- FileSystem fs = file.getFileSystem(config);
- if (!isCompressed) {
- FSDataOutputStream fileOut = fs.create(file, false);
- return this.getRecordWriter(new OutputStreamWriter(fileOut), config, file);
- } else {
- FSDataOutputStream fileOut = fs.create(file, false);
- return this.getRecordWriter(new OutputStreamWriter(codec.createOutputStream(fileOut)), config, file);
- }
- }
-
- /**
- * Gets the file extension to use for output
- *
- * @return File extension including the '.'
- */
- protected abstract String getFileExtension();
-
- /**
- * Gets the record writer to use
- *
- * @param writer
- * Writer to write output to
- * @param config
- * Configuration
- * @param outputPath
- * Output path being written to
- * @return Record writer
- * @throws IOException
- * May be thrown if a record writer cannot be obtained for any
- * reason
- */
- protected abstract RecordWriter<TKey, T> getRecordWriter(Writer writer, Configuration config, Path outputPath)
- throws IOException;
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractStreamRdfNodeTupleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractStreamRdfNodeTupleOutputFormat.java
deleted file mode 100644
index 30999ae..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/AbstractStreamRdfNodeTupleOutputFormat.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.system.StreamRDF;
-
-/**
- * Abstract output format for formats that use the RIOT {@link StreamRDF} API to
- * stream the writes
- *
- * @param <TKey>
- * Key type
- * @param <TTuple>
- * Tuple type
- * @param <TValue>
- * Writable tuple type i.e. the value type
- */
-public abstract class AbstractStreamRdfNodeTupleOutputFormat<TKey, TTuple, TValue extends AbstractNodeTupleWritable<TTuple>>
- extends AbstractNodeTupleOutputFormat<TKey, TTuple, TValue> {
-
- @Override
- protected RecordWriter<TKey, TValue> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return getRecordWriter(getStream(writer, config), writer, config);
- }
-
- /**
- * Gets a writer which provides a bridge between the {@link RecordWriter}
- * and {@link StreamRDF} APIs
- *
- * @param stream
- * RDF Stream
- * @param writer
- * Writer
- * @param config
- * Configuration
- * @return Record Writer
- */
- protected abstract RecordWriter<TKey, TValue> getRecordWriter(StreamRDF stream, Writer writer, Configuration config);
-
- /**
- * Gets a {@link StreamRDF} to which the tuples to be output should be
- * passed
- *
- * @param writer
- * Writer
- * @param config
- * Configuration
- * @return RDF Stream
- */
- protected abstract StreamRDF getStream(Writer writer, Configuration config);
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/QuadsOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/QuadsOutputFormat.java
deleted file mode 100644
index f85b85f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/QuadsOutputFormat.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An output format for RDF quads that dynamically selects the appropriate quad
- * writer to use based on the file extension of the output file.
- * <p>
- * For example this is useful when the output format may be controlled by a user
- * supplied filename i.e. the desired RDF output format is not precisely known
- * in advance
- * </p>
- *
- * @param <TKey>
- * Key type
- */
-public abstract class QuadsOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath)
- throws IOException {
- Lang lang = RDFLanguages.filenameToLang(outputPath.getName());
- if (lang == null)
- throw new IOException("There is no registered RDF language for the output file " + outputPath.toString());
-
- if (!RDFLanguages.isQuads(lang))
- throw new IOException(
- lang.getName()
- + " is not a RDF quads format, perhaps you wanted TriplesOutputFormat or TriplesOrQuadsOutputFormat instead?");
-
- // This will throw an appropriate error if the language does not support
- // writing quads
- return HadoopRdfIORegistry.<TKey> createQuadWriter(lang, writer, config);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOrQuadsOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOrQuadsOutputFormat.java
deleted file mode 100644
index 82f553e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOrQuadsOutputFormat.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.writers.QuadsToTriplesWriter;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An output format for RDF triples/quads that dynamically selects the
- * appropriate triple/quad writer to use based on the file extension of the
- * output file.
- * <p>
- * For example this is useful when the output format may be controlled by a user
- * supplied filename i.e. the desired RDF output format is not precisely known
- * in advance.
- * </p>
- * <h3>Warning</h3>
- * <p>
- * Where the format is determined to be triples the quads are converted into
- * triples are thus will lose any graph information that might be carried.
- * </p>
- *
- * @param <TKey>
- * Key type
- */
-public abstract class TriplesOrQuadsOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath)
- throws IOException {
- Lang lang = RDFLanguages.filenameToLang(outputPath.getName());
- if (lang == null)
- throw new IOException("There is no registered RDF language for the output file " + outputPath.toString());
-
- if (!RDFLanguages.isQuads(lang) && !RDFLanguages.isTriples(lang))
- throw new IOException(lang.getName() + " is not a RDF triples/quads format");
-
- if (HadoopRdfIORegistry.hasQuadWriter(lang)) {
- // Supports quads directly
- return HadoopRdfIORegistry.<TKey> createQuadWriter(lang, writer, config);
- } else {
- // Try to create a triples writer and wrap downwards from quads
- // This will throw an error if a triple writer is not available
- return new QuadsToTriplesWriter<TKey>(HadoopRdfIORegistry.<TKey> createTripleWriter(lang, writer, config));
- }
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOutputFormat.java
deleted file mode 100644
index 7034064..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/TriplesOutputFormat.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jena.hadoop.rdf.io.output;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.registry.HadoopRdfIORegistry;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-/**
- * An output format for RDF triples that dynamically selects the appropriate triple
- * writer to use based on the file extension of the output file.
- * <p>
- * For example this is useful when the output format may be controlled by a user
- * supplied filename i.e. the desired RDF output format is not precisely known
- * in advance
- * </p>
- *
- * @param <TKey>
- * Key type
- */
-public abstract class TriplesOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) throws IOException {
- Lang lang = RDFLanguages.filenameToLang(outputPath.getName());
- if (lang == null)
- throw new IOException("There is no registered RDF language for the output file " + outputPath.toString());
-
- if (!RDFLanguages.isTriples(lang)) throw new IOException(
- lang.getName()
- + " is not a RDF triples format, perhaps you wanted QuadsOutputFormat or TriplesOrQuadsOutputFormat instead?");
-
- // This will throw an appropriate error if the language does not support writing triples
- return HadoopRdfIORegistry.<TKey>createTripleWriter(lang, writer, config);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDQuadOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDQuadOutputFormat.java
deleted file mode 100644
index fcec298..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDQuadOutputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.jsonld;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.jsonld.JsonLDQuadWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.sparql.core.Quad ;
-
-public class JsonLDQuadOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".jsonld";
- }
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new JsonLDQuadWriter<TKey>(writer);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDTripleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDTripleOutputFormat.java
deleted file mode 100644
index 131cea9..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/jsonld/JsonLDTripleOutputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.jsonld;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.jsonld.JsonLDTripleWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-public class JsonLDTripleOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".jsonld";
- }
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new JsonLDTripleWriter<TKey>(writer);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/nquads/NQuadsOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/nquads/NQuadsOutputFormat.java
deleted file mode 100644
index 5d2c10b..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/nquads/NQuadsOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.nquads;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.nquads.NQuadsWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * NQuads output format
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public class NQuadsOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new NQuadsWriter<TKey>(writer);
- }
-
- @Override
- protected String getFileExtension() {
- return ".nq";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesNodeOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesNodeOutputFormat.java
deleted file mode 100644
index a33b32e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesNodeOutputFormat.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.ntriples;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.ntriples.NTriplesNodeWriter;
-import org.apache.jena.hadoop.rdf.types.NodeWritable;
-
-
-/**
- * NTriples based node output format
- *
- *
- *
- * @param <TValue>
- * Value type
- */
-public class NTriplesNodeOutputFormat<TValue> extends AbstractNodeOutputFormat<TValue> {
-
- @Override
- protected RecordWriter<NodeWritable, TValue> getRecordWriter(Writer writer, Configuration config) {
- return new NTriplesNodeWriter<TValue>(writer);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesOutputFormat.java
deleted file mode 100644
index 65e1665..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/ntriples/NTriplesOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.ntriples;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.ntriples.NTriplesWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * NTriples output format
- *
- *
- * @param <TKey>
- *
- */
-public class NTriplesOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new NTriplesWriter<TKey>(writer);
- }
-
- @Override
- protected String getFileExtension() {
- return ".nt";
- }
-
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfjson/RdfJsonOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfjson/RdfJsonOutputFormat.java
deleted file mode 100644
index 13e996b..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfjson/RdfJsonOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.rdfjson;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.rdfjson.RdfJsonWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * RDF/JSON output format
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public class RdfJsonOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".rj";
- }
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new RdfJsonWriter<TKey>(writer);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfxml/RdfXmlOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfxml/RdfXmlOutputFormat.java
deleted file mode 100644
index 8955e4c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/rdfxml/RdfXmlOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.rdfxml;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.rdfxml.RdfXmlWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * RDF/XML output format
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public class RdfXmlOutputFormat<TKey> extends AbstractNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".rdf";
- }
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, Configuration config, Path outputPath) {
- return new RdfXmlWriter<TKey>(writer);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftQuadOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftQuadOutputFormat.java
deleted file mode 100644
index b3bb815..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftQuadOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.thrift;
-
-import java.io.Writer;
-import java.nio.charset.Charset;
-
-import org.apache.commons.io.output.WriterOutputStream;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractStreamRdfNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.StreamRdfQuadWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.riot.thrift.StreamRDF2Thrift;
-import org.apache.jena.sparql.core.Quad ;
-
-public class ThriftQuadOutputFormat<TKey> extends AbstractStreamRdfNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".trdf";
- }
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(StreamRDF stream, Writer writer, Configuration config) {
- return new StreamRdfQuadWriter<TKey>(stream, writer);
- }
-
- @Override
- protected StreamRDF getStream(Writer writer, Configuration config) {
- return new StreamRDF2Thrift(new WriterOutputStream(writer, Charset.forName("utf-8")), false);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftTripleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftTripleOutputFormat.java
deleted file mode 100644
index 830a202..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/thrift/ThriftTripleOutputFormat.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.thrift;
-
-import java.io.Writer;
-import java.nio.charset.Charset;
-
-import org.apache.commons.io.output.WriterOutputStream;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractStreamRdfNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.StreamRdfTripleWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.riot.thrift.StreamRDF2Thrift;
-
-public class ThriftTripleOutputFormat<TKey> extends AbstractStreamRdfNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".trdf";
- }
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(StreamRDF stream, Writer writer, Configuration config) {
- return new StreamRdfTripleWriter<TKey>(stream, writer);
- }
-
- @Override
- protected StreamRDF getStream(Writer writer, Configuration config) {
- return new StreamRDF2Thrift(new WriterOutputStream(writer, Charset.forName("utf-8")), false);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/BatchedTriGOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/BatchedTriGOutputFormat.java
deleted file mode 100644
index 48a8694..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/BatchedTriGOutputFormat.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.trig;
-
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractBatchedNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.trig.BatchedTriGWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * Output format for TriG that uses a batched approach, note that this will
- * produce invalid data where blank nodes span batches so it is typically better
- * to use the {@link TriGOutputFormat} instead
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public class BatchedTriGOutputFormat<TKey> extends
- AbstractBatchedNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(Writer writer,
- long batchSize) {
- return new BatchedTriGWriter<TKey>(writer, batchSize);
- }
-
- @Override
- protected String getFileExtension() {
- return ".trig";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/TriGOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/TriGOutputFormat.java
deleted file mode 100644
index c569538..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trig/TriGOutputFormat.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.trig;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractStreamRdfNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.StreamRdfQuadWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.riot.writer.WriterStreamRDFBlocks;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * Output format for TriG
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public class TriGOutputFormat<TKey> extends AbstractStreamRdfNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(StreamRDF stream, Writer writer, Configuration config) {
- return new StreamRdfQuadWriter<TKey>(stream, writer);
- }
-
- @Override
- protected StreamRDF getStream(Writer writer, Configuration config) {
- return new WriterStreamRDFBlocks(writer, null);
- }
-
- @Override
- protected String getFileExtension() {
- return ".trig";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trix/TriXOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trix/TriXOutputFormat.java
deleted file mode 100644
index 54ba260..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/trix/TriXOutputFormat.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.trix;
-
-import java.io.Writer;
-import java.nio.charset.Charset;
-
-import org.apache.commons.io.output.WriterOutputStream;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.hadoop.rdf.io.output.AbstractStreamRdfNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.StreamRdfQuadWriter;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.riot.writer.StreamWriterTriX;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * Output format for TriX
- *
- * @param <TKey>
- * Key type
- */
-public class TriXOutputFormat<TKey> extends AbstractStreamRdfNodeTupleOutputFormat<TKey, Quad, QuadWritable> {
-
- @Override
- protected RecordWriter<TKey, QuadWritable> getRecordWriter(StreamRDF stream, Writer writer, Configuration config) {
- return new StreamRdfQuadWriter<TKey>(stream, writer);
- }
-
- @Override
- protected StreamRDF getStream(Writer writer, Configuration config) {
- return new StreamWriterTriX(new WriterOutputStream(writer, Charset.forName("utf-8")));
- }
-
- @Override
- protected String getFileExtension() {
- return ".trix";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/BatchedTurtleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/BatchedTurtleOutputFormat.java
deleted file mode 100644
index 3d528bc..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/BatchedTurtleOutputFormat.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.turtle;
-
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractBatchedNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.turtle.BatchedTurtleWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * Output format for Turtle that uses a batched approach, note that this will
- * produce invalid data where blank nodes span batches so it is typically better
- * to use the {@link TurtleOutputFormat} instead
- *
- * @param <TKey>
- * Key type
- */
-public class BatchedTurtleOutputFormat<TKey> extends AbstractBatchedNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(Writer writer, long batchSize) {
- return new BatchedTurtleWriter<TKey>(writer, batchSize);
- }
-
- @Override
- protected String getFileExtension() {
- return ".ttl";
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/TurtleOutputFormat.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/TurtleOutputFormat.java
deleted file mode 100644
index 20258ad..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/turtle/TurtleOutputFormat.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.turtle;
-
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.io.output.AbstractStreamRdfNodeTupleOutputFormat;
-import org.apache.jena.hadoop.rdf.io.output.writers.StreamRdfTripleWriter;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.riot.writer.WriterStreamRDFBlocks;
-
-/**
- * Turtle output format
- *
- * @param <TKey>
- * Key type
- */
-public class TurtleOutputFormat<TKey> extends AbstractStreamRdfNodeTupleOutputFormat<TKey, Triple, TripleWritable> {
-
- @Override
- protected String getFileExtension() {
- return ".ttl";
- }
-
- @Override
- protected RecordWriter<TKey, TripleWritable> getRecordWriter(StreamRDF stream, Writer writer, Configuration config) {
- return new StreamRdfTripleWriter<TKey>(stream, writer);
- }
-
- @Override
- protected StreamRDF getStream(Writer writer, Configuration config) {
- return new WriterStreamRDFBlocks(writer, null);
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedNodeTupleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedNodeTupleWriter.java
deleted file mode 100644
index ac09463..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedNodeTupleWriter.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * An abstract implementation of a record writer that writes records in batches.
- * <p>
- * It is important to note that the writer will write output periodically once
- * sufficient tuples have been gathered. If there is an incomplete batch when
- * the {@link #close(TaskAttemptContext)} method is called then the final batch
- * will be written then. Writing in batches increases the chances that the
- * writer will be able to effectively use the syntax compressions of the RDF
- * serialization being used.
- * </p>
- * <p>
- * The implementation only writes the value portion of the key value pair since
- * it is the value portion that is used to convey the node tuples
- * </p>
- *
- *
- *
- * @param <TKey>
- * @param <TValue>
- * @param <T>
- */
-public abstract class AbstractBatchedNodeTupleWriter<TKey, TValue, T extends AbstractNodeTupleWritable<TValue>> extends
- RecordWriter<TKey, T> {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractBatchedNodeTupleWriter.class);
-
- private Writer writer;
- private long batchSize;
-
- protected AbstractBatchedNodeTupleWriter(Writer writer, long batchSize) {
- if (writer == null)
- throw new NullPointerException("writer cannot be null");
- if (batchSize <= 0)
- throw new IllegalArgumentException("batchSize must be >= 1");
- this.writer = writer;
- this.batchSize = batchSize;
- }
-
- @Override
- public final void write(TKey key, T value) throws IOException {
- LOG.debug("write({}={})", key, value);
- if (this.add(value) >= this.batchSize) {
- long size = this.writeOutput(writer);
- if (size > 0)
- throw new IOException("Derived implementation failed to empty the current batch after writing");
- }
- }
-
- /**
- * Adds the tuple to the batch of tuples that will be written when the batch
- * threshold is reached or when the {@link #close(TaskAttemptContext)}
- * method is called.
- *
- * @param value
- * Tuple
- * @return The current size of the batch waiting to be written
- */
- protected abstract long add(T value);
-
- @Override
- public void close(TaskAttemptContext context) throws IOException {
- if (this.writer != null) {
- long size = this.writeOutput(writer);
- if (size > 0)
- throw new IOException("Derived implementation failed to empty the current batch after writing");
- this.writer.close();
- this.writer = null;
- }
- }
-
- /**
- * Writes the current batch of tuples to the writer, the writer should not
- * be closed and the batch should be emptied by the implementation.
- * <p>
- * If the current batch is empty then this should be a no-op
- * </p>
- *
- * @param writer
- * Writer
- * @return Current batch size which should always be zero
- */
- protected abstract long writeOutput(Writer writer);
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedQuadWriter.java
deleted file mode 100644
index 18e47ca..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedQuadWriter.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-import java.util.List;
-
-import org.apache.commons.collections.IteratorUtils;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFDataMgr;
-import org.apache.jena.riot.RDFWriterRegistry;
-import org.apache.jena.sparql.core.DatasetGraph ;
-import org.apache.jena.sparql.core.DatasetGraphFactory ;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * Abstract batched record writer for quad formats
- *
- *
- *
- * @param <TKey>
- */
-public abstract class AbstractBatchedQuadWriter<TKey> extends AbstractBatchedNodeTupleWriter<TKey, Quad, QuadWritable> {
-
- private DatasetGraph g = DatasetGraphFactory.createGeneral();
-
- protected AbstractBatchedQuadWriter(Writer writer, long batchSize) {
- super(writer, batchSize);
- }
-
- @Override
- protected final long add(QuadWritable value) {
- g.add(value.get());
- return g.size();
- }
-
- @SuppressWarnings("deprecation")
- @Override
- protected final long writeOutput(Writer writer) {
- if (this.g.size() == 0)
- return 0;
- RDFDataMgr.write(writer, this.g, RDFWriterRegistry.defaultSerialization(this.getRdfLanguage()));
-
- // Clear the dataset graph
- @SuppressWarnings("unchecked")
- List<Node> graphNames = IteratorUtils.toList(this.g.listGraphNodes());
- for (Node graphName : graphNames) {
- this.g.removeGraph(graphName);
- }
- this.g.getDefaultGraph().clear();
-
- return this.g.size();
- }
-
- /**
- * Gets the RDF language used for output
- *
- * @return RDF language
- */
- protected abstract Lang getRdfLanguage();
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedTripleWriter.java
deleted file mode 100644
index 322921c..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractBatchedTripleWriter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.graph.Graph ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFDataMgr;
-import org.apache.jena.sparql.graph.GraphFactory ;
-
-/**
- * Abstract batched record writer for triple formats
- *
- *
- *
- * @param <TKey>
- */
-public abstract class AbstractBatchedTripleWriter<TKey> extends AbstractBatchedNodeTupleWriter<TKey, Triple, TripleWritable> {
-
- private Graph g = GraphFactory.createDefaultGraph();
-
- protected AbstractBatchedTripleWriter(Writer writer, long batchSize) {
- super(writer, batchSize);
- }
-
- @Override
- protected final long add(TripleWritable value) {
- g.add(value.get());
- return g.size();
- }
-
- @SuppressWarnings("deprecation")
- @Override
- protected final long writeOutput(Writer writer) {
- if (this.g.size() == 0)
- return 0;
- RDFDataMgr.write(writer, this.g, this.getRdfLanguage());
- this.g.clear();
- return this.g.size();
- }
-
- /**
- * Gets the RDF language used for output
- *
- * @return RDF language
- */
- protected abstract Lang getRdfLanguage();
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedNodeTupleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedNodeTupleWriter.java
deleted file mode 100644
index ae82036..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedNodeTupleWriter.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.atlas.io.AWriter;
-import org.apache.jena.atlas.io.Writer2;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.out.NodeFormatter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An abstract implementation of a record writer that writes records to a line
- * based tuple formats.
- * <p>
- * The implementation only writes the value portion of the key value pair since
- * it is the value portion that is used to convey the node tuples
- * </p>
- *
- *
- * @param <TKey>
- * Key type
- * @param <TValue>
- * Tuple type
- * @param <T>
- * Writable node tuple type
- *
- */
-public abstract class AbstractLineBasedNodeTupleWriter<TKey, TValue, T extends AbstractNodeTupleWritable<TValue>> extends
- RecordWriter<TKey, T> {
- /**
- * Default separator written between nodes
- */
- public static final String DEFAULT_SEPARATOR = " ";
- /**
- * Default terminator written at the end of each line
- */
- public static final String DEFAULT_TERMINATOR = ".";
-
- private static final Logger log = LoggerFactory.getLogger(AbstractLineBasedNodeTupleWriter.class);
-
- private AWriter writer;
- private NodeFormatter formatter;
-
- /**
- * Creates a new tuple writer using the default NTriples node formatter
- *
- * @param writer
- * Writer
- */
- public AbstractLineBasedNodeTupleWriter(Writer writer) {
- this(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new tuple writer
- *
- * @param writer
- * Writer
- * @param formatter
- * Node formatter
- */
- public AbstractLineBasedNodeTupleWriter(Writer writer, NodeFormatter formatter) {
- if (writer == null)
- throw new NullPointerException("writer cannot be null");
- if (formatter == null)
- throw new NullPointerException("formatter cannot be null");
- this.formatter = formatter;
- this.writer = Writer2.wrap(writer);
- }
-
- @Override
- public void write(TKey key, T value) {
- log.debug("write({}={})", key, value);
-
- Node[] ns = this.getNodes(value);
- String sep = this.getSeparator();
- NodeFormatter formatter = this.getNodeFormatter();
- for (int i = 0; i < ns.length; i++) {
- formatter.format(this.writer, ns[i]);
- this.writer.print(sep);
- }
- this.writer.println(this.getTerminator());
- this.writer.flush();
- }
-
- /**
- * Gets the nodes of the tuple in the order they should be written
- *
- * @param tuple
- * Tuple
- * @return Nodes
- */
- protected abstract Node[] getNodes(T tuple);
-
- /**
- * Gets the node formatter to use for formatting nodes
- *
- * @return Node formatter
- */
- protected NodeFormatter getNodeFormatter() {
- return this.formatter;
- }
-
- /**
- * Gets the separator that is written between nodes
- *
- * @return Separator
- */
- protected String getSeparator() {
- return DEFAULT_SEPARATOR;
- }
-
- /**
- * Gets the terminator that is written at the end of each tuple
- *
- * @return Terminator
- */
- protected String getTerminator() {
- return DEFAULT_TERMINATOR;
- }
-
- @Override
- public void close(TaskAttemptContext context) {
- log.debug("close({})", context);
- writer.close();
- }
-}
\ No newline at end of file
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedQuadWriter.java
deleted file mode 100644
index d0c9971..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedQuadWriter.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.graph.Node ;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.out.NodeFormatter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An abstract writer for line based quad formats
- *
- *
- * @param <TKey>
- *
- */
-public abstract class AbstractLineBasedQuadWriter<TKey> extends AbstractLineBasedNodeTupleWriter<TKey, Quad, QuadWritable> {
-
- /**
- * Creates a new writer using the default NTriples node formatter
- *
- * @param writer
- * Writer
- */
- public AbstractLineBasedQuadWriter(Writer writer) {
- this(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new writer using the specified node formatter
- *
- * @param writer
- * Writer
- * @param formatter
- * Node formatter
- */
- public AbstractLineBasedQuadWriter(Writer writer, NodeFormatter formatter) {
- super(writer, formatter);
- }
-
- @Override
- protected Node[] getNodes(QuadWritable tuple) {
- Quad q = tuple.get();
- if (q.isDefaultGraph()) {
- return new Node[] { q.getSubject(), q.getPredicate(), q.getObject() };
- } else {
- return new Node[] { q.getSubject(), q.getPredicate(), q.getObject(), q.getGraph() };
- }
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedTripleWriter.java
deleted file mode 100644
index 9be5faa..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractLineBasedTripleWriter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.out.NodeFormatter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-
-/**
- * An abstract writer for line based triple formats
- *
- * @param <TKey>
- * Key type
- *
- */
-public abstract class AbstractLineBasedTripleWriter<TKey> extends
- AbstractLineBasedNodeTupleWriter<TKey, Triple, TripleWritable> {
-
- /**
- * Creates a new writer using the default NTriples node formatter
- *
- * @param writer
- * Writer
- */
- public AbstractLineBasedTripleWriter(Writer writer) {
- this(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new writer using the specified node formatter
- *
- * @param writer
- * Writer
- * @param formatter
- * Node formatter
- */
- public AbstractLineBasedTripleWriter(Writer writer, NodeFormatter formatter) {
- super(writer, formatter);
- }
-
- @Override
- protected Node[] getNodes(TripleWritable tuple) {
- Triple t = tuple.get();
- return new Node[] { t.getSubject(), t.getPredicate(), t.getObject() };
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractNodeWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractNodeWriter.java
deleted file mode 100644
index a627e1e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractNodeWriter.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.atlas.io.AWriter;
-import org.apache.jena.atlas.io.Writer2;
-import org.apache.jena.atlas.lib.tuple.Tuple ;
-import org.apache.jena.graph.Node ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.NodeTupleWritable;
-import org.apache.jena.hadoop.rdf.types.NodeWritable;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.out.NodeFormatter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-import org.apache.jena.sparql.core.Quad ;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Abstract implementation of a record writer which writes pairs of nodes and
- * arbitrary values to text based files
- *
- *
- *
- * @param <TValue>
- */
-public abstract class AbstractNodeWriter<TValue> extends RecordWriter<NodeWritable, TValue> {
-
- /**
- * Default separator written between nodes and their associated values
- */
- public static final String DEFAULT_SEPARATOR = "\t";
-
- private static final Logger log = LoggerFactory.getLogger(AbstractNodeWriter.class);
-
- protected AWriter writer;
- private NodeFormatter formatter;
-
- /**
- * Creates a new tuple writer using the default NTriples node formatter
- *
- * @param writer
- * Writer
- */
- public AbstractNodeWriter(Writer writer) {
- this(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new tuple writer
- *
- * @param writer
- * Writer
- * @param formatter
- * Node formatter
- */
- public AbstractNodeWriter(Writer writer, NodeFormatter formatter) {
- if (writer == null)
- throw new NullPointerException("writer cannot be null");
- if (formatter == null)
- throw new NullPointerException("formatter cannot be null");
- this.formatter = formatter;
- this.writer = Writer2.wrap(writer);
- }
-
- @Override
- public final void write(NodeWritable key, TValue value) {
- this.writeKey(key);
- this.writer.write(this.getSeparator());
- this.writeValue(value);
- this.writer.write('\n');
- }
-
- /**
- * Writes the given key
- *
- * @param key
- * Key
- */
- protected void writeKey(NodeWritable key) {
- writeNode(key.get());
- }
-
- /**
- * Writes a Node
- *
- * @param n
- * Node
- */
- protected void writeNode(Node n) {
- this.getNodeFormatter().format(this.writer, n);
- }
-
- /**
- * Writes a sequence of nodes
- *
- * @param ns
- * Nodes
- */
- protected void writeNodes(Node... ns) {
- String sep = this.getSeparator();
- for (int i = 0; i < ns.length; i++) {
- writeNode(ns[i]);
- if (i < ns.length - 1)
- this.writer.write(sep);
- }
- }
-
- /**
- * Writes the given value
- * <p>
- * If the value is one of the RDF primitives - {@link NodeWritable},
- * {@link TripleWritable}, {@link QuadWritable} and
- * {@link NodeTupleWritable} - then it is formatted as a series of nodes
- * separated by the separator. Otherwise it is formatted by simply calling
- * {@code toString()} on it.
- * </p>
- *
- * @param value
- * Values
- */
- protected void writeValue(TValue value) {
- // Handle null specially
- if (value instanceof NullWritable || value == null)
- return;
-
- // Handle RDF primitives specially and format them as proper nodes
- if (value instanceof NodeWritable) {
- this.writeKey((NodeWritable) value);
- } else if (value instanceof TripleWritable) {
- Triple t = ((TripleWritable) value).get();
- this.writeNodes(t.getSubject(), t.getPredicate(), t.getObject());
- } else if (value instanceof QuadWritable) {
- Quad q = ((QuadWritable) value).get();
- this.writeNodes(q.getGraph(), q.getSubject(), q.getPredicate(), q.getObject());
- } else if (value instanceof NodeTupleWritable) {
- Tuple<Node> tuple = ((NodeTupleWritable) value).get();
- Node[] n = new Node[tuple.len()] ;
- tuple.copyInto(n);
- this.writeNodes(n);
- } else {
- // For arbitrary values just toString() them
- this.writer.write(value.toString());
- }
- }
-
- @Override
- public void close(TaskAttemptContext context) {
- log.debug("close({})", context);
- writer.close();
- }
-
- /**
- * Gets the node formatter to use for formatting nodes
- *
- * @return Node formatter
- */
- protected NodeFormatter getNodeFormatter() {
- return this.formatter;
- }
-
- /**
- * Gets the separator that is written between nodes
- *
- * @return Separator
- */
- protected String getSeparator() {
- return DEFAULT_SEPARATOR;
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractStreamRdfNodeTupleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractStreamRdfNodeTupleWriter.java
deleted file mode 100644
index 3b4ec19..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractStreamRdfNodeTupleWriter.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.apache.jena.riot.system.StreamRDF;
-
-public abstract class AbstractStreamRdfNodeTupleWriter<TKey, TTuple, TValue extends AbstractNodeTupleWritable<TTuple>>
- extends RecordWriter<TKey, TValue> {
-
- private StreamRDF stream;
- private Writer writer;
-
- public AbstractStreamRdfNodeTupleWriter(StreamRDF stream, Writer writer) {
- if (stream == null)
- throw new NullPointerException("stream cannot be null");
- if (writer == null)
- throw new NullPointerException("writer cannot be null");
- this.stream = stream;
- this.stream.start();
- this.writer = writer;
- }
-
- @Override
- public void close(TaskAttemptContext context) throws IOException {
- this.stream.finish();
- this.writer.close();
- }
-
- @Override
- public void write(TKey key, TValue value) {
- this.sendOutput(key, value, this.stream);
- }
-
- /**
- * Method that handles an actual key value pair passing it to the
- * {@link StreamRDF} instance as appropriate
- *
- * @param key
- * Key
- * @param value
- * Value
- * @param stream
- * RDF Stream
- */
- protected abstract void sendOutput(TKey key, TValue value, StreamRDF stream);
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileNodeTupleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileNodeTupleWriter.java
deleted file mode 100644
index d5643b1..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileNodeTupleWriter.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.IOException;
-import java.io.Writer;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * An abstract implementation of a record writer that writes records to whole
- * file formats.
- * <p>
- * It is important to note that the writer does not actually write any output
- * until the {@link #close(TaskAttemptContext)} method is called as it must
- * write the entire output in one go otherwise the output would be invalid. Also
- * writing in one go increases the chances that the writer will be able to
- * effectively use the syntax compressions of the RDF serialization being used.
- * </p>
- * <p>
- * The implementation only writes the value portion of the key value pair since
- * it is the value portion that is used to convey the node tuples
- * </p>
- *
- *
- *
- * @param <TKey>
- * @param <TValue>
- * @param <T>
- */
-public abstract class AbstractWholeFileNodeTupleWriter<TKey, TValue, T extends AbstractNodeTupleWritable<TValue>> extends
- RecordWriter<TKey, T> {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractWholeFileNodeTupleWriter.class);
-
- private Writer writer;
-
- protected AbstractWholeFileNodeTupleWriter(Writer writer) {
- if (writer == null)
- throw new NullPointerException("writer cannot be null");
- this.writer = writer;
- }
-
- @Override
- public final void write(TKey key, T value) {
- LOG.debug("write({}={})", key, value);
- this.add(value);
- }
-
- /**
- * Adds the tuple to the cache of tuples that will be written when the
- * {@link #close(TaskAttemptContext)} method is called
- *
- * @param value
- */
- protected abstract void add(T value);
-
- @Override
- public void close(TaskAttemptContext context) throws IOException {
- if (this.writer != null) {
- this.writeOutput(writer);
- this.writer.close();
- this.writer = null;
- }
- }
-
- /**
- * Writes the cached tuples to the writer, the writer should not be closed
- * by this method implementation
- *
- * @param writer
- * Writer
- */
- protected abstract void writeOutput(Writer writer);
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileQuadWriter.java
deleted file mode 100644
index 620246a..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileQuadWriter.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFDataMgr;
-import org.apache.jena.riot.RDFWriterRegistry;
-import org.apache.jena.sparql.core.DatasetGraph ;
-import org.apache.jena.sparql.core.DatasetGraphFactory ;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * An abstract record writer for whole file triple formats
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public abstract class AbstractWholeFileQuadWriter<TKey> extends AbstractWholeFileNodeTupleWriter<TKey, Quad, QuadWritable> {
-
- private DatasetGraph g = DatasetGraphFactory.createGeneral();
-
- protected AbstractWholeFileQuadWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected final void add(QuadWritable value) {
- this.g.add(value.get());
- }
-
- @SuppressWarnings("deprecation")
- @Override
- protected void writeOutput(Writer writer) {
- RDFDataMgr.write(writer, this.g, RDFWriterRegistry.defaultSerialization(this.getRdfLanguage()));
- }
-
- /**
- * Gets the RDF language to write the output in
- *
- * @return RDF language
- */
- protected abstract Lang getRdfLanguage();
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileTripleWriter.java
deleted file mode 100644
index 719ab95..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/AbstractWholeFileTripleWriter.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.graph.Graph ;
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFDataMgr;
-import org.apache.jena.sparql.graph.GraphFactory ;
-
-/**
- * An abstract record writer for whole file triple formats
- *
- *
- *
- * @param <TKey>
- * Key type
- */
-public abstract class AbstractWholeFileTripleWriter<TKey> extends AbstractWholeFileNodeTupleWriter<TKey, Triple, TripleWritable> {
-
- private Graph g = GraphFactory.createDefaultGraph();
-
- protected AbstractWholeFileTripleWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected final void add(TripleWritable value) {
- this.g.add(value.get());
- }
-
- @SuppressWarnings("deprecation")
- @Override
- protected final void writeOutput(Writer writer) {
- RDFDataMgr.write(writer, this.g, this.getRdfLanguage());
- }
-
- /**
- * Gets the RDF language to write the output in
- *
- * @return RDF language
- */
- protected abstract Lang getRdfLanguage();
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/QuadsToTriplesWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/QuadsToTriplesWriter.java
deleted file mode 100644
index e932e1f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/QuadsToTriplesWriter.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.IOException;
-
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-
-/**
- * A record writer that converts quads into triples by stripping off the graph
- * field
- *
- * @param <TKey>
- * Key type
- */
-public class QuadsToTriplesWriter<TKey> extends RecordWriter<TKey, QuadWritable> {
-
- private RecordWriter<TKey, TripleWritable> writer;
-
- /**
- * Creates a new writer
- *
- * @param tripleWriter
- * Triple writer to use
- */
- public QuadsToTriplesWriter(RecordWriter<TKey, TripleWritable> tripleWriter) {
- if (tripleWriter == null)
- throw new NullPointerException("tripleWriter cannot be null");
- this.writer = tripleWriter;
- }
-
- @Override
- public void write(TKey key, QuadWritable value) throws IOException, InterruptedException {
- this.writer.write(key, new TripleWritable(value.get().asTriple()));
- }
-
- @Override
- public void close(TaskAttemptContext context) throws IOException, InterruptedException {
- this.writer.close(context);
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfQuadWriter.java
deleted file mode 100644
index 0c9c30d..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfQuadWriter.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.types.QuadWritable;
-import org.apache.jena.riot.system.StreamRDF;
-import org.apache.jena.sparql.core.Quad ;
-
-/**
- * A writer for {@link StreamRDF} based quad writers
- *
- * @param <TKey>
- * Key type
- */
-public class StreamRdfQuadWriter<TKey> extends
- AbstractStreamRdfNodeTupleWriter<TKey, Quad, QuadWritable> {
-
- public StreamRdfQuadWriter(StreamRDF stream, Writer writer) {
- super(stream, writer);
- }
-
- @Override
- protected void sendOutput(TKey key, QuadWritable value, StreamRDF stream) {
- stream.quad(value.get());
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfTripleWriter.java
deleted file mode 100644
index f71c777..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/StreamRdfTripleWriter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers;
-
-import java.io.Writer;
-
-import org.apache.jena.graph.Triple ;
-import org.apache.jena.hadoop.rdf.types.TripleWritable;
-import org.apache.jena.riot.system.StreamRDF;
-
-/**
- * A writer for {@link StreamRDF} based triple writers
- *
- * @param <TKey>
- * Key type
- */
-public class StreamRdfTripleWriter<TKey> extends AbstractStreamRdfNodeTupleWriter<TKey, Triple, TripleWritable> {
-
- public StreamRdfTripleWriter(StreamRDF stream, Writer writer) {
- super(stream, writer);
- }
-
- @Override
- protected void sendOutput(TKey key, TripleWritable value, StreamRDF stream) {
- stream.triple(value.get());
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDQuadWriter.java
deleted file mode 100644
index 1b4b62f..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDQuadWriter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.jsonld;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileQuadWriter;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class JsonLDQuadWriter<TKey> extends AbstractWholeFileQuadWriter<TKey> {
-
- public JsonLDQuadWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.JSONLD;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDTripleWriter.java
deleted file mode 100644
index 8d2079d..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/jsonld/JsonLDTripleWriter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.jsonld;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileTripleWriter;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class JsonLDTripleWriter<TKey> extends AbstractWholeFileTripleWriter<TKey> {
-
- public JsonLDTripleWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.JSONLD;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/nquads/NQuadsWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/nquads/NQuadsWriter.java
deleted file mode 100644
index 8b0b9ef..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/nquads/NQuadsWriter.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.nquads;
-
-import java.io.Writer;
-
-import org.apache.jena.atlas.lib.CharSpace ;
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractLineBasedQuadWriter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-
-/**
- * A record writer for NQuads
- *
- *
- *
- * @param <TKey>
- */
-public class NQuadsWriter<TKey> extends AbstractLineBasedQuadWriter<TKey> {
-
- /**
- * Creates a new writer
- *
- * @param writer
- * Writer
- */
- public NQuadsWriter(Writer writer) {
- super(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new writer using the given character space
- *
- * @param writer
- * Writer
- * @param charSpace
- * Character space
- */
- public NQuadsWriter(Writer writer, CharSpace charSpace) {
- super(writer, new NodeFormatterNT(charSpace));
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesNodeWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesNodeWriter.java
deleted file mode 100644
index 3340802..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesNodeWriter.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.ntriples;
-
-import java.io.Writer;
-
-import org.apache.jena.atlas.lib.CharSpace ;
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractNodeWriter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-
-/**
- * A NTriples based node writer
- *
- *
- *
- * @param <TValue>
- * Value type
- */
-public class NTriplesNodeWriter<TValue> extends AbstractNodeWriter<TValue> {
-
- /**
- * Creates a new writer
- *
- * @param writer
- * Writer
- */
- public NTriplesNodeWriter(Writer writer) {
- super(writer);
- }
-
- /**
- * Creates a new writer
- *
- * @param writer
- * Writer
- * @param charSpace
- * Character space to use
- */
- public NTriplesNodeWriter(Writer writer, CharSpace charSpace) {
- super(writer, new NodeFormatterNT(charSpace));
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesWriter.java
deleted file mode 100644
index a01a351..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/ntriples/NTriplesWriter.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.ntriples;
-
-import java.io.Writer;
-
-import org.apache.jena.atlas.lib.CharSpace ;
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractLineBasedTripleWriter;
-import org.apache.jena.riot.out.NodeFormatterNT;
-
-/**
- * A record writer for NTriples
- *
- *
- * @param <TKey>
- * Key type
- *
- */
-public class NTriplesWriter<TKey> extends AbstractLineBasedTripleWriter<TKey> {
-
- /**
- * Creates a new writer
- *
- * @param writer
- * Writer
- */
- public NTriplesWriter(Writer writer) {
- super(writer, new NodeFormatterNT());
- }
-
- /**
- * Creates a new writer using the given character space
- *
- * @param writer
- * Writer
- * @param charSpace
- * Character space
- */
- public NTriplesWriter(Writer writer, CharSpace charSpace) {
- super(writer, new NodeFormatterNT(charSpace));
- }
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfjson/RdfJsonWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfjson/RdfJsonWriter.java
deleted file mode 100644
index da7d1bd..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfjson/RdfJsonWriter.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.rdfjson;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileTripleWriter;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record writer for RDF/JSON
- *
- *
- * @param <TKey>
- * Key type
- *
- */
-public class RdfJsonWriter<TKey> extends AbstractWholeFileTripleWriter<TKey> {
-
- /**
- * Creates a new record writer
- *
- * @param writer
- * Writer
- */
- public RdfJsonWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.RDFJSON;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfxml/RdfXmlWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfxml/RdfXmlWriter.java
deleted file mode 100644
index 8dbef01..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/rdfxml/RdfXmlWriter.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.rdfxml;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileTripleWriter;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record writer for RDF/XML
- *
- *
- * @param <TKey>
- * Key type
- *
- */
-public class RdfXmlWriter<TKey> extends AbstractWholeFileTripleWriter<TKey> {
-
- /**
- * Creates a new record writer
- *
- * @param writer
- * Writer
- */
- public RdfXmlWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.RDFXML;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftQuadWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftQuadWriter.java
deleted file mode 100644
index 599dba9..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftQuadWriter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.thrift;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileQuadWriter;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class ThriftQuadWriter<TKey> extends AbstractWholeFileQuadWriter<TKey> {
-
- public ThriftQuadWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.THRIFT;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftTripleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftTripleWriter.java
deleted file mode 100644
index 0089459..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/thrift/ThriftTripleWriter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.thrift;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractWholeFileTripleWriter;
-import org.apache.jena.riot.Lang;
-import org.apache.jena.riot.RDFLanguages;
-
-public class ThriftTripleWriter<TKey> extends AbstractWholeFileTripleWriter<TKey> {
-
- public ThriftTripleWriter(Writer writer) {
- super(writer);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return RDFLanguages.THRIFT;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/trig/BatchedTriGWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/trig/BatchedTriGWriter.java
deleted file mode 100644
index e946c13..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/trig/BatchedTriGWriter.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.trig;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractBatchedQuadWriter;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record writer for TriG that uses the batched approach, note that this
- * approach will produce invalid data when blank nodes span batches
- *
- * @param <TKey>
- * Key type
- */
-public class BatchedTriGWriter<TKey> extends AbstractBatchedQuadWriter<TKey> {
-
- /**
- * Creates a new record writer
- *
- * @param writer
- * Writer
- * @param batchSize
- * Batch size
- */
- public BatchedTriGWriter(Writer writer, long batchSize) {
- super(writer, batchSize);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.TRIG;
- }
-
-}
diff --git a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/turtle/BatchedTurtleWriter.java b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/turtle/BatchedTurtleWriter.java
deleted file mode 100644
index 3702c6e..0000000
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/output/writers/turtle/BatchedTurtleWriter.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jena.hadoop.rdf.io.output.writers.turtle;
-
-import java.io.Writer;
-
-import org.apache.jena.hadoop.rdf.io.output.writers.AbstractBatchedTripleWriter;
-import org.apache.jena.riot.Lang;
-
-/**
- * A record writer for Turtle that uses the batched approach, note that this
- * approach will produce invalid data when blank nodes span batches
- *
- *
- *
- * @param <TKey>
- */
-public class BatchedTurtleWriter<TKey> extends
- AbstractBatchedTripleWriter<TKey> {
-
- /**
- * Creates a new record writer
- *
- * @param writer
- * Writer
- * @param batchSize
- * Batch size
- */
- public BatchedTurtleWriter(Writer writer, long batchSize) {
- super(writer, batchSize);
- }
-
- @Override
- protected Lang getRdfLanguage() {
- return Lang.TURTLE;
... 21253 lines suppressed ...