Posted to commits@jena.apache.org by an...@apache.org on 2016/05/14 14:03:19 UTC

[20/42] jena git commit: Merge commit 'refs/pull/143/head' of github.com:apache/jena

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
index e862c04,e862c04..31f9645
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
+++ b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/QuadWritable.java
@@@ -1,135 -1,135 +1,135 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.types;
--
--import java.io.DataInput;
--import java.io.DataOutput;
--import java.io.IOException;
--
--import org.apache.hadoop.io.WritableComparator;
--import org.apache.jena.graph.Node ;
--import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
--import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
--import org.apache.jena.riot.thrift.ThriftConvert;
--import org.apache.jena.riot.thrift.wire.RDF_Quad;
--import org.apache.jena.sparql.core.Quad ;
--import org.apache.thrift.TException;
--
--/**
-- * A writable quad
-- */
--public class QuadWritable extends AbstractNodeTupleWritable<Quad> {
--
--    static {
--        WritableComparator.define(QuadWritable.class, new SimpleBinaryComparator());
--    }
--
--    private RDF_Quad quad = new RDF_Quad();
--
--    /**
--     * Creates a new empty instance
--     */
--    public QuadWritable() {
--        this(null);
--    }
--
--    /**
--     * Creates a new instance with the given value
--     * 
--     * @param q
--     *            Quad
--     */
--    public QuadWritable(Quad q) {
--        super(q);
--    }
--
--    /**
--     * Creates a new instance from the given input
--     * 
--     * @param input
--     *            Input
--     * @return New instance
--     * @throws IOException
--     */
--    public static QuadWritable read(DataInput input) throws IOException {
--        QuadWritable q = new QuadWritable();
--        q.readFields(input);
--        return q;
--    }
--
--    @Override
--    public void set(Quad tuple) {
--        super.set(tuple);
--        this.quad.clear();
--    }
--
--    @Override
--    public void readFields(DataInput input) throws IOException {
--        this.quad.clear();
--        int tripleLength = input.readInt();
--        byte[] buffer = new byte[tripleLength];
--        input.readFully(buffer);
--        try {
--            ThriftConverter.fromBytes(buffer, this.quad);
--        } catch (TException e) {
--            throw new IOException(e);
--        }
--        this.setInternal(new Quad(ThriftConvert.convert(this.quad.getG()), ThriftConvert.convert(this.quad.getS()),
--                ThriftConvert.convert(this.quad.getP()), ThriftConvert.convert(this.quad.getO())));
--    }
--
--    @Override
--    public void write(DataOutput output) throws IOException {
--        if (this.get() == null)
--            throw new IOException(
--                    "Null quads cannot be written using this class, consider using NodeTupleWritable instead");
--
--        // May not have yet prepared the Thrift triple
--        if (!this.quad.isSetS()) {
--            Quad tuple = this.get();
--            this.quad.setG(ThriftConvert.convert(tuple.getGraph(), false));
--            this.quad.setS(ThriftConvert.convert(tuple.getSubject(), false));
--            this.quad.setP(ThriftConvert.convert(tuple.getPredicate(), false));
--            this.quad.setO(ThriftConvert.convert(tuple.getObject(), false));
--        }
--
--        byte[] buffer;
--        try {
--            buffer = ThriftConverter.toBytes(this.quad);
--        } catch (TException e) {
--            throw new IOException(e);
--        }
--        output.writeInt(buffer.length);
--        output.write(buffer);
--    }
--
--    @Override
--    protected Quad createTuple(Node[] ns) {
--        if (ns.length != 4)
--            throw new IllegalArgumentException(String.format(
--                    "Incorrect number of nodes to form a quad - got %d but expected 4", ns.length));
--        return new Quad(ns[0], ns[1], ns[2], ns[3]);
--    }
--
--    @Override
--    protected Node[] createNodes(Quad tuple) {
--        return new Node[] { tuple.getGraph(), tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
--    }
--
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.types;
++
++import java.io.DataInput;
++import java.io.DataOutput;
++import java.io.IOException;
++
++import org.apache.hadoop.io.WritableComparator;
++import org.apache.jena.graph.Node ;
++import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
++import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
++import org.apache.jena.riot.thrift.ThriftConvert;
++import org.apache.jena.riot.thrift.wire.RDF_Quad;
++import org.apache.jena.sparql.core.Quad ;
++import org.apache.thrift.TException;
++
++/**
++ * A writable quad
++ */
++public class QuadWritable extends AbstractNodeTupleWritable<Quad> {
++
++    static {
++        WritableComparator.define(QuadWritable.class, new SimpleBinaryComparator());
++    }
++
++    private RDF_Quad quad = new RDF_Quad();
++
++    /**
++     * Creates a new empty instance
++     */
++    public QuadWritable() {
++        this(null);
++    }
++
++    /**
++     * Creates a new instance with the given value
++     * 
++     * @param q
++     *            Quad
++     */
++    public QuadWritable(Quad q) {
++        super(q);
++    }
++
++    /**
++     * Creates a new instance from the given input
++     * 
++     * @param input
++     *            Input
++     * @return New instance
++     * @throws IOException
++     */
++    public static QuadWritable read(DataInput input) throws IOException {
++        QuadWritable q = new QuadWritable();
++        q.readFields(input);
++        return q;
++    }
++
++    @Override
++    public void set(Quad tuple) {
++        super.set(tuple);
++        this.quad.clear();
++    }
++
++    @Override
++    public void readFields(DataInput input) throws IOException {
++        this.quad.clear();
++        int tripleLength = input.readInt();
++        byte[] buffer = new byte[tripleLength];
++        input.readFully(buffer);
++        try {
++            ThriftConverter.fromBytes(buffer, this.quad);
++        } catch (TException e) {
++            throw new IOException(e);
++        }
++        this.setInternal(new Quad(ThriftConvert.convert(this.quad.getG()), ThriftConvert.convert(this.quad.getS()),
++                ThriftConvert.convert(this.quad.getP()), ThriftConvert.convert(this.quad.getO())));
++    }
++
++    @Override
++    public void write(DataOutput output) throws IOException {
++        if (this.get() == null)
++            throw new IOException(
++                    "Null quads cannot be written using this class, consider using NodeTupleWritable instead");
++
++        // May not have yet prepared the Thrift triple
++        if (!this.quad.isSetS()) {
++            Quad tuple = this.get();
++            this.quad.setG(ThriftConvert.convert(tuple.getGraph(), false));
++            this.quad.setS(ThriftConvert.convert(tuple.getSubject(), false));
++            this.quad.setP(ThriftConvert.convert(tuple.getPredicate(), false));
++            this.quad.setO(ThriftConvert.convert(tuple.getObject(), false));
++        }
++
++        byte[] buffer;
++        try {
++            buffer = ThriftConverter.toBytes(this.quad);
++        } catch (TException e) {
++            throw new IOException(e);
++        }
++        output.writeInt(buffer.length);
++        output.write(buffer);
++    }
++
++    @Override
++    protected Quad createTuple(Node[] ns) {
++        if (ns.length != 4)
++            throw new IllegalArgumentException(String.format(
++                    "Incorrect number of nodes to form a quad - got %d but expected 4", ns.length));
++        return new Quad(ns[0], ns[1], ns[2], ns[3]);
++    }
++
++    @Override
++    protected Node[] createNodes(Quad tuple) {
++        return new Node[] { tuple.getGraph(), tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
index 2457bb7,2457bb7..ba81b66
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
+++ b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/TripleWritable.java
@@@ -1,137 -1,137 +1,137 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.types;
--
--import java.io.DataInput;
--import java.io.DataOutput;
--import java.io.IOException;
--
--import org.apache.hadoop.io.WritableComparator;
--import org.apache.jena.graph.Node ;
--import org.apache.jena.graph.Triple ;
--import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
--import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
--import org.apache.jena.riot.thrift.ThriftConvert;
--import org.apache.jena.riot.thrift.wire.RDF_Triple;
--import org.apache.thrift.TException;
--
--/**
-- * A writable triple
-- * 
-- * 
-- * 
-- */
--public class TripleWritable extends AbstractNodeTupleWritable<Triple> {
--    
--    static {
--        WritableComparator.define(TripleWritable.class, new SimpleBinaryComparator());
--    }
--
--    private RDF_Triple triple = new RDF_Triple();
--
--    /**
--     * Creates a new instance using the default NTriples node formatter
--     */
--    public TripleWritable() {
--        this(null);
--    }
--
--    /**
--     * Creates a new instance with a given value that uses a specific node
--     * formatter
--     * 
--     * @param t
--     *            Triple
--     */
--    public TripleWritable(Triple t) {
--        super(t);
--    }
--
--    /**
--     * Creates a new instance from the given input
--     * 
--     * @param input
--     *            Input
--     * @return New instance
--     * @throws IOException
--     */
--    public static TripleWritable read(DataInput input) throws IOException {
--        TripleWritable t = new TripleWritable();
--        t.readFields(input);
--        return t;
--    }
--
--    @Override
--    public void set(Triple tuple) {
--        super.set(tuple);
--        this.triple.clear();
--    }
--
--    @Override
--    public void readFields(DataInput input) throws IOException {
--        this.triple.clear();
--        int tripleLength = input.readInt();
--        byte[] buffer = new byte[tripleLength];
--        input.readFully(buffer);
--        try {
--            ThriftConverter.fromBytes(buffer, this.triple);
--        } catch (TException e) {
--            throw new IOException(e);
--        }
--        this.setInternal(new Triple(ThriftConvert.convert(this.triple.getS()),
--                ThriftConvert.convert(this.triple.getP()), ThriftConvert.convert(this.triple.getO())));
--    }
--
--    @Override
--    public void write(DataOutput output) throws IOException {
--        if (this.get() == null)
--            throw new IOException(
--                    "Null triples cannot be written using this class, consider using NodeTupleWritable instead");
--        
--        // May not have yet prepared the Thrift triple
--        if (!this.triple.isSetS()) {
--            Triple tuple = this.get();
--            this.triple.setS(ThriftConvert.convert(tuple.getSubject(), false));
--            this.triple.setP(ThriftConvert.convert(tuple.getPredicate(), false));
--            this.triple.setO(ThriftConvert.convert(tuple.getObject(), false));
--        }
--
--        byte[] buffer;
--        try {
--            buffer = ThriftConverter.toBytes(this.triple);
--        } catch (TException e) {
--            throw new IOException(e);
--        }
--        output.writeInt(buffer.length);
--        output.write(buffer);
--    }
--
--    @Override
--    protected Triple createTuple(Node[] ns) {
--        if (ns.length != 3)
--            throw new IllegalArgumentException(String.format(
--                    "Incorrect number of nodes to form a triple - got %d but expected 3", ns.length));
--        return new Triple(ns[0], ns[1], ns[2]);
--    }
--
--    @Override
--    protected Node[] createNodes(Triple tuple) {
--        return new Node[] { tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
--    }
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.types;
++
++import java.io.DataInput;
++import java.io.DataOutput;
++import java.io.IOException;
++
++import org.apache.hadoop.io.WritableComparator;
++import org.apache.jena.graph.Node ;
++import org.apache.jena.graph.Triple ;
++import org.apache.jena.hadoop.rdf.types.comparators.SimpleBinaryComparator;
++import org.apache.jena.hadoop.rdf.types.converters.ThriftConverter;
++import org.apache.jena.riot.thrift.ThriftConvert;
++import org.apache.jena.riot.thrift.wire.RDF_Triple;
++import org.apache.thrift.TException;
++
++/**
++ * A writable triple
++ * 
++ * 
++ * 
++ */
++public class TripleWritable extends AbstractNodeTupleWritable<Triple> {
++    
++    static {
++        WritableComparator.define(TripleWritable.class, new SimpleBinaryComparator());
++    }
++
++    private RDF_Triple triple = new RDF_Triple();
++
++    /**
++     * Creates a new instance using the default NTriples node formatter
++     */
++    public TripleWritable() {
++        this(null);
++    }
++
++    /**
++     * Creates a new instance with a given value that uses a specific node
++     * formatter
++     * 
++     * @param t
++     *            Triple
++     */
++    public TripleWritable(Triple t) {
++        super(t);
++    }
++
++    /**
++     * Creates a new instance from the given input
++     * 
++     * @param input
++     *            Input
++     * @return New instance
++     * @throws IOException
++     */
++    public static TripleWritable read(DataInput input) throws IOException {
++        TripleWritable t = new TripleWritable();
++        t.readFields(input);
++        return t;
++    }
++
++    @Override
++    public void set(Triple tuple) {
++        super.set(tuple);
++        this.triple.clear();
++    }
++
++    @Override
++    public void readFields(DataInput input) throws IOException {
++        this.triple.clear();
++        int tripleLength = input.readInt();
++        byte[] buffer = new byte[tripleLength];
++        input.readFully(buffer);
++        try {
++            ThriftConverter.fromBytes(buffer, this.triple);
++        } catch (TException e) {
++            throw new IOException(e);
++        }
++        this.setInternal(new Triple(ThriftConvert.convert(this.triple.getS()),
++                ThriftConvert.convert(this.triple.getP()), ThriftConvert.convert(this.triple.getO())));
++    }
++
++    @Override
++    public void write(DataOutput output) throws IOException {
++        if (this.get() == null)
++            throw new IOException(
++                    "Null triples cannot be written using this class, consider using NodeTupleWritable instead");
++        
++        // May not have yet prepared the Thrift triple
++        if (!this.triple.isSetS()) {
++            Triple tuple = this.get();
++            this.triple.setS(ThriftConvert.convert(tuple.getSubject(), false));
++            this.triple.setP(ThriftConvert.convert(tuple.getPredicate(), false));
++            this.triple.setO(ThriftConvert.convert(tuple.getObject(), false));
++        }
++
++        byte[] buffer;
++        try {
++            buffer = ThriftConverter.toBytes(this.triple);
++        } catch (TException e) {
++            throw new IOException(e);
++        }
++        output.writeInt(buffer.length);
++        output.write(buffer);
++    }
++
++    @Override
++    protected Triple createTuple(Node[] ns) {
++        if (ns.length != 3)
++            throw new IllegalArgumentException(String.format(
++                    "Incorrect number of nodes to form a triple - got %d but expected 3", ns.length));
++        return new Triple(ns[0], ns[1], ns[2]);
++    }
++
++    @Override
++    protected Node[] createNodes(Triple tuple) {
++        return new Node[] { tuple.getSubject(), tuple.getPredicate(), tuple.getObject() };
++    }
++}

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
index 6c46714,6c46714..cc2924d
--- a/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
+++ b/jena-elephas/jena-elephas-common/src/main/java/org/apache/jena/hadoop/rdf/types/comparators/SimpleBinaryComparator.java
@@@ -1,34 -1,34 +1,34 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.types.comparators;
--
--import org.apache.hadoop.io.WritableComparator;
--
--/**
-- * A general purpose comparator that may be used with any types which can be
-- * compared directly on their binary encodings
-- */
--public class SimpleBinaryComparator extends WritableComparator {
--
--    @Override
--    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
--        return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);
--    }
--
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.types.comparators;
++
++import org.apache.hadoop.io.WritableComparator;
++
++/**
++ * A general purpose comparator that may be used with any types which can be
++ * compared directly on their binary encodings
++ */
++public class SimpleBinaryComparator extends WritableComparator {
++
++    @Override
++    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
++        return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
index aded8c4,aded8c4..5edff0f
--- a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
+++ b/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/CharacteristicTests.java
@@@ -1,21 -1,21 +1,21 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.types;
  
  import java.io.ByteArrayInputStream;
@@@ -24,11 -24,11 +24,11 @@@ import java.io.DataInputStream
  import java.io.DataOutputStream;
  import java.io.IOException;
  import java.util.Iterator;
--
--import org.apache.jena.graph.Node ;
--import org.apache.jena.graph.NodeFactory ;
--import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
--import org.apache.jena.hadoop.rdf.types.CharacteristicWritable;
++
++import org.apache.jena.graph.Node ;
++import org.apache.jena.graph.NodeFactory ;
++import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
++import org.apache.jena.hadoop.rdf.types.CharacteristicWritable;
  import org.junit.Assert;
  import org.junit.Test;
  

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
index e51ab6a,e51ab6a..bf2dbe9
--- a/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
+++ b/jena-elephas/jena-elephas-common/src/test/java/org/apache/jena/hadoop/rdf/io/types/RdfTypesTest.java
@@@ -1,401 -1,401 +1,401 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.io.types;
--
--import java.io.* ;
--
--import org.apache.hadoop.io.WritableComparable;
--import org.apache.jena.atlas.lib.tuple.Tuple ;
--import static org.apache.jena.atlas.lib.tuple.TupleFactory.tuple ;
--import org.apache.jena.datatypes.xsd.XSDDatatype ;
--import org.apache.jena.graph.Node ;
--import org.apache.jena.graph.NodeFactory ;
--import org.apache.jena.graph.Triple ;
--import org.apache.jena.hadoop.rdf.types.NodeTupleWritable;
--import org.apache.jena.hadoop.rdf.types.NodeWritable;
--import org.apache.jena.hadoop.rdf.types.QuadWritable;
--import org.apache.jena.hadoop.rdf.types.TripleWritable;
--import org.apache.jena.sparql.core.Quad ;
--import org.junit.Assert;
--import org.junit.Ignore;
--import org.junit.Test;
--import org.slf4j.Logger;
--import org.slf4j.LoggerFactory;
--
--/**
-- * Tests for the various RDF types defined by the
-- * {@link org.apache.jena.hadoop.rdf.types} package
-- * 
-- * 
-- * 
-- */
--public class RdfTypesTest {
--
--    private static final Logger LOG = LoggerFactory.getLogger(RdfTypesTest.class);
--
--    private ByteArrayOutputStream outputStream;
--    private ByteArrayInputStream inputStream;
--
--    /**
--     * Prepare for output
--     * 
--     * @return Data output
--     */
--    private DataOutput prepareOutput() {
--        this.outputStream = new ByteArrayOutputStream();
--        return new DataOutputStream(this.outputStream);
--    }
--
--    /**
--     * Prepare for input from the previously written output
--     * 
--     * @return Data Input
--     */
--    private DataInput prepareInput() {
--        this.inputStream = new ByteArrayInputStream(this.outputStream.toByteArray());
--        return new DataInputStream(this.inputStream);
--    }
--
--    /**
--     * Prepare for input from the given data
--     * 
--     * @param data
--     *            Data
--     * @return Data Input
--     */
--    @SuppressWarnings("unused")
--    private DataInput prepareInput(byte[] data) {
--        this.inputStream = new ByteArrayInputStream(data);
--        return new DataInputStream(this.inputStream);
--    }
--
--    @SuppressWarnings({ "unchecked", "rawtypes" })
--    private <T extends WritableComparable> void testWriteRead(T writable, T expected) throws IOException, InstantiationException, IllegalAccessException,
--            ClassNotFoundException {
--        // Write out data
--        DataOutput output = this.prepareOutput();
--        writable.write(output);
--
--        // Read back in data
--        DataInput input = this.prepareInput();
--        T actual = (T) Class.forName(writable.getClass().getName()).newInstance();
--        actual.readFields(input);
--
--        LOG.debug("Original = " + writable.toString());
--        LOG.debug("Round Tripped = " + actual.toString());
--
--        // Check equivalent
--        Assert.assertEquals(0, expected.compareTo(actual));
--    }
--    
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_null() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = null;
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--    
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    @Ignore
--    public void node_writable_variable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createVariable("x");
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--    
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    @Ignore
--    public void node_writable_variable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createVariable("really-log-variable-name-asddsfr4545egfdgdfgfdgdtgvdg-dfgfdgdfgdfgdfg4-dfvdfgdfgdfgfdgfdgdfgdfgfdg");
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_uri_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createURI("http://example.org");
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_uri_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createURI("http://user:password@example.org/some/path?key=value#id");
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("simple");
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("language", "en", null);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_03() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("string", XSDDatatype.XSDstring);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_04() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("1234", XSDDatatype.XSDinteger);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_05() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("123.4", XSDDatatype.XSDdecimal);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_06() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("12.3e4", XSDDatatype.XSDdouble);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_literal_07() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createLiteral("true", XSDDatatype.XSDboolean);
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_bnode_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createBlankNode();
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--    }
--
--    /**
--     * Basic node writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void node_writable_bnode_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Node n = NodeFactory.createBlankNode();
--        NodeWritable nw = new NodeWritable(n);
--        testWriteRead(nw, nw);
--        NodeWritable nw2 = new NodeWritable(n);
--        testWriteRead(nw2, nw2);
--
--        Assert.assertEquals(0, nw.compareTo(nw2));
--    }
--
--    /**
--     * Basic triple writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void triple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Triple t = new Triple(NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
--        TripleWritable tw = new TripleWritable(t);
--        testWriteRead(tw, tw);
--    }
--    
--    /**
--     * Basic triple writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void triple_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Triple t = new Triple(NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
--        TripleWritable tw = new TripleWritable(t);
--        testWriteRead(tw, tw);
--    }
--
--    /**
--     * Basic quad writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void quad_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"),
--                NodeFactory.createLiteral("value"));
--        QuadWritable qw = new QuadWritable(q);
--        testWriteRead(qw, qw);
--    }
--    
--    /**
--     * Basic quad writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void quad_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"),
--                NodeFactory.createLiteral("value"));
--        QuadWritable qw = new QuadWritable(q);
--        testWriteRead(qw, qw);
--    }
--
--    /**
--     * Basic tuple writable round tripping test
--     * 
--     * @throws IOException
--     * @throws InstantiationException
--     * @throws IllegalAccessException
--     * @throws ClassNotFoundException
--     */
--    @Test
--    public void tuple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
--        Tuple<Node> t = tuple(NodeFactory.createURI("http://one"), NodeFactory.createURI("http://two"),
--                              NodeFactory.createLiteral("value"),
--                              NodeFactory.createLiteral("foo"), NodeFactory.createURI("http://three"));
--        NodeTupleWritable tw = new NodeTupleWritable(t);
--        testWriteRead(tw, tw);
--    }
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.io.types;
++
++import java.io.* ;
++
++import org.apache.hadoop.io.WritableComparable;
++import org.apache.jena.atlas.lib.tuple.Tuple ;
++import static org.apache.jena.atlas.lib.tuple.TupleFactory.tuple ;
++import org.apache.jena.datatypes.xsd.XSDDatatype ;
++import org.apache.jena.graph.Node ;
++import org.apache.jena.graph.NodeFactory ;
++import org.apache.jena.graph.Triple ;
++import org.apache.jena.hadoop.rdf.types.NodeTupleWritable;
++import org.apache.jena.hadoop.rdf.types.NodeWritable;
++import org.apache.jena.hadoop.rdf.types.QuadWritable;
++import org.apache.jena.hadoop.rdf.types.TripleWritable;
++import org.apache.jena.sparql.core.Quad ;
++import org.junit.Assert;
++import org.junit.Ignore;
++import org.junit.Test;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++/**
++ * Tests for the various RDF types defined by the
++ * {@link org.apache.jena.hadoop.rdf.types} package
++ * 
++ * 
++ * 
++ */
++public class RdfTypesTest {
++
++    private static final Logger LOG = LoggerFactory.getLogger(RdfTypesTest.class);
++
++    private ByteArrayOutputStream outputStream;
++    private ByteArrayInputStream inputStream;
++
++    /**
++     * Prepare for output
++     * 
++     * @return Data output
++     */
++    private DataOutput prepareOutput() {
++        this.outputStream = new ByteArrayOutputStream();
++        return new DataOutputStream(this.outputStream);
++    }
++
++    /**
++     * Prepare for input from the previously written output
++     * 
++     * @return Data Input
++     */
++    private DataInput prepareInput() {
++        this.inputStream = new ByteArrayInputStream(this.outputStream.toByteArray());
++        return new DataInputStream(this.inputStream);
++    }
++
++    /**
++     * Prepare for input from the given data
++     * 
++     * @param data
++     *            Data
++     * @return Data Input
++     */
++    @SuppressWarnings("unused")
++    private DataInput prepareInput(byte[] data) {
++        this.inputStream = new ByteArrayInputStream(data);
++        return new DataInputStream(this.inputStream);
++    }
++
++    @SuppressWarnings({ "unchecked", "rawtypes" })
++    private <T extends WritableComparable> void testWriteRead(T writable, T expected) throws IOException, InstantiationException, IllegalAccessException,
++            ClassNotFoundException {
++        // Write out data
++        DataOutput output = this.prepareOutput();
++        writable.write(output);
++
++        // Read back in data
++        DataInput input = this.prepareInput();
++        T actual = (T) Class.forName(writable.getClass().getName()).newInstance();
++        actual.readFields(input);
++
++        LOG.debug("Original = " + writable.toString());
++        LOG.debug("Round Tripped = " + actual.toString());
++
++        // Check equivalent
++        Assert.assertEquals(0, expected.compareTo(actual));
++    }
++    
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_null() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = null;
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++    
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    @Ignore
++    public void node_writable_variable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createVariable("x");
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++    
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    @Ignore
++    public void node_writable_variable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createVariable("really-log-variable-name-asddsfr4545egfdgdfgfdgdtgvdg-dfgfdgdfgdfgdfg4-dfvdfgdfgdfgfdgfdgdfgdfgfdg");
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_uri_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createURI("http://example.org");
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_uri_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createURI("http://user:password@example.org/some/path?key=value#id");
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("simple");
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("language", "en", null);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_03() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("string", XSDDatatype.XSDstring);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_04() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("1234", XSDDatatype.XSDinteger);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_05() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("123.4", XSDDatatype.XSDdecimal);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_06() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("12.3e4", XSDDatatype.XSDdouble);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_literal_07() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createLiteral("true", XSDDatatype.XSDboolean);
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_bnode_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createBlankNode();
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++    }
++
++    /**
++     * Basic node writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void node_writable_bnode_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Node n = NodeFactory.createBlankNode();
++        NodeWritable nw = new NodeWritable(n);
++        testWriteRead(nw, nw);
++        NodeWritable nw2 = new NodeWritable(n);
++        testWriteRead(nw2, nw2);
++
++        Assert.assertEquals(0, nw.compareTo(nw2));
++    }
++
++    /**
++     * Basic triple writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void triple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Triple t = new Triple(NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
++        TripleWritable tw = new TripleWritable(t);
++        testWriteRead(tw, tw);
++    }
++    
++    /**
++     * Basic triple writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void triple_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Triple t = new Triple(NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral("value"));
++        TripleWritable tw = new TripleWritable(t);
++        testWriteRead(tw, tw);
++    }
++
++    /**
++     * Basic quad writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void quad_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://example"), NodeFactory.createURI("http://predicate"),
++                NodeFactory.createLiteral("value"));
++        QuadWritable qw = new QuadWritable(q);
++        testWriteRead(qw, qw);
++    }
++    
++    /**
++     * Basic quad writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void quad_writable_02() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Quad q = new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createBlankNode(), NodeFactory.createURI("http://predicate"),
++                NodeFactory.createLiteral("value"));
++        QuadWritable qw = new QuadWritable(q);
++        testWriteRead(qw, qw);
++    }
++
++    /**
++     * Basic tuple writable round tripping test
++     * 
++     * @throws IOException
++     * @throws InstantiationException
++     * @throws IllegalAccessException
++     * @throws ClassNotFoundException
++     */
++    @Test
++    public void tuple_writable_01() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException {
++        Tuple<Node> t = tuple(NodeFactory.createURI("http://one"), NodeFactory.createURI("http://two"),
++                              NodeFactory.createLiteral("value"),
++                              NodeFactory.createLiteral("foo"), NodeFactory.createURI("http://three"));
++        NodeTupleWritable tw = new NodeTupleWritable(t);
++        testWriteRead(tw, tw);
++    }
++}
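
For readers skimming these tests, here is a minimal sketch of the write/read round trip they exercise, assuming the writables expose their wrapped tuple via get(); the class name QuadRoundTripSketch is purely illustrative and not part of this commit:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.jena.graph.NodeFactory;
    import org.apache.jena.hadoop.rdf.types.QuadWritable;
    import org.apache.jena.sparql.core.Quad;

    public class QuadRoundTripSketch {
        public static void main(String[] args) throws IOException {
            Quad q = new Quad(Quad.defaultGraphNodeGenerated,
                    NodeFactory.createURI("http://example"),
                    NodeFactory.createURI("http://predicate"),
                    NodeFactory.createLiteral("value"));
            QuadWritable original = new QuadWritable(q);

            // Serialise the writable to an in-memory byte stream
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            original.write(new DataOutputStream(bytes));

            // Deserialise it again and check the quad survived the round trip
            // (get() returning the wrapped Quad is assumed here)
            QuadWritable copy = QuadWritable.read(
                    new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            if (!original.get().equals(copy.get())) {
                throw new AssertionError("Round trip did not preserve the quad");
            }
        }
    }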

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
index 5c1b41c,5c1b41c..e2cc847
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/HadoopIOConstants.java
@@@ -1,21 -1,21 +1,21 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io;
  
  /**

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
index 27c2bb2,27c2bb2..dbe16ff
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/RdfIOConstants.java
@@@ -1,81 -1,81 +1,81 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.io;
--
--import java.io.IOException;
--
--/**
-- * RDF IO related constants
-- * 
-- * 
-- * 
-- */
--public class RdfIOConstants {
--
--    /**
--     * Private constructor prevents instantiation
--     */
--    private RdfIOConstants() {
--    }
--
--    /**
--     * Configuration key used to set whether bad tuples are ignored. Ignoring
--     * bad tuples is the default behaviour; when this key is explicitly set to
--     * {@code false}, bad tuples will result in an {@link IOException} being
--     * thrown by the relevant record readers.
--     */
--    public static final String INPUT_IGNORE_BAD_TUPLES = "rdf.io.input.ignore-bad-tuples";
--
--    /**
--     * Configuration key used to set the batch size used for RDF output formats
--     * that take a batched writing approach. Default value is given by the
--     * constant {@link #DEFAULT_OUTPUT_BATCH_SIZE}.
--     */
--    public static final String OUTPUT_BATCH_SIZE = "rdf.io.output.batch-size";
--
--    /**
--     * Default batch size for batched output formats
--     */
--    public static final long DEFAULT_OUTPUT_BATCH_SIZE = 10000;
--
--    /**
--     * Configuration key used to control behaviour with regards to how blank
--     * nodes are handled.
--     * <p>
--     * The default behaviour is that blank nodes are file scoped which is what
--     * the RDF specifications require.
--     * </p>
--     * <p>
--     * However, in the case of a multi-stage pipeline, this behaviour can cause
--     * blank nodes to diverge over several jobs and introduce spurious blank
--     * nodes over time. This is described in <a
--     * href="https://issues.apache.org/jira/browse/JENA-820">JENA-820</a> and
--     * enabling this flag for jobs in your pipeline allows you to work around
--     * this problem.
--     * </p>
--     * <h3>Warning</h3> You should only enable this flag for jobs that take in
--     * RDF output originating from previous jobs, since our normal blank node
--     * allocation policy ensures that blank nodes will be file scoped and unique
--     * over all files (barring unfortunate hashing collisions). If you enable
--     * this for jobs that take in RDF originating from other sources you may
--     * incorrectly conflate blank nodes that are supposed to be distinct and
--     * separate nodes.
--     */
--    public static final String GLOBAL_BNODE_IDENTITY = "rdf.io.input.bnodes.global-identity";
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.io;
++
++import java.io.IOException;
++
++/**
++ * RDF IO related constants
++ * 
++ * 
++ * 
++ */
++public class RdfIOConstants {
++
++    /**
++     * Private constructor prevents instantiation
++     */
++    private RdfIOConstants() {
++    }
++
++    /**
++     * Configuration key used to set whether bad tuples are ignored. Ignoring
++     * bad tuples is the default behaviour; when this key is explicitly set to
++     * {@code false}, bad tuples will result in an {@link IOException} being
++     * thrown by the relevant record readers.
++     */
++    public static final String INPUT_IGNORE_BAD_TUPLES = "rdf.io.input.ignore-bad-tuples";
++
++    /**
++     * Configuration key used to set the batch size used for RDF output formats
++     * that take a batched writing approach. Default value is given by the
++     * constant {@link #DEFAULT_OUTPUT_BATCH_SIZE}.
++     */
++    public static final String OUTPUT_BATCH_SIZE = "rdf.io.output.batch-size";
++
++    /**
++     * Default batch size for batched output formats
++     */
++    public static final long DEFAULT_OUTPUT_BATCH_SIZE = 10000;
++
++    /**
++     * Configuration key used to control behaviour with regards to how blank
++     * nodes are handled.
++     * <p>
++     * The default behaviour is that blank nodes are file scoped which is what
++     * the RDF specifications require.
++     * </p>
++     * <p>
++     * However, in the case of a multi-stage pipeline, this behaviour can cause
++     * blank nodes to diverge over several jobs and introduce spurious blank
++     * nodes over time. This is described in <a
++     * href="https://issues.apache.org/jira/browse/JENA-820">JENA-820</a> and
++     * enabling this flag for jobs in your pipeline allows you to work around
++     * this problem.
++     * </p>
++     * <h3>Warning</h3> You should only enable this flag for jobs that take in
++     * RDF output originating from previous jobs, since our normal blank node
++     * allocation policy ensures that blank nodes will be file scoped and unique
++     * over all files (barring unfortunate hashing collisions). If you enable
++     * this for jobs that take in RDF originating from other sources you may
++     * incorrectly conflate blank nodes that are supposed to be distinct and
++     * separate nodes.
++     */
++    public static final String GLOBAL_BNODE_IDENTITY = "rdf.io.input.bnodes.global-identity";
++}
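
As a brief illustration of how these keys might be applied when building a job, a small sketch follows; the job name, batch size and class name are illustrative only, while the constants and the standard Hadoop Configuration/Job APIs are as defined above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.jena.hadoop.rdf.io.RdfIOConstants;

    public class RdfIOConfigSketch {
        public static void main(String[] args) throws Exception {
            Configuration config = new Configuration();

            // Fail fast on malformed input instead of silently skipping bad tuples
            config.setBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, false);

            // Use a smaller batch size than DEFAULT_OUTPUT_BATCH_SIZE for batched output formats
            config.setLong(RdfIOConstants.OUTPUT_BATCH_SIZE, 1000);

            // Only enable for pipelines consuming RDF produced by earlier Elephas jobs (see JENA-820)
            config.setBoolean(RdfIOConstants.GLOBAL_BNODE_IDENTITY, true);

            Job job = Job.getInstance(config, "rdf-pipeline-stage");
            // ... configure input/output formats, mapper, reducer and paths as usual ...
        }
    }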

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
index 1fcb030,1fcb030..64fdc79
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractNLineFileInputFormat.java
@@@ -1,21 -1,21 +1,21 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.input;
  
  import java.io.IOException;
@@@ -27,8 -27,8 +27,8 @@@ import org.apache.hadoop.mapreduce.Inpu
  import org.apache.hadoop.mapreduce.JobContext;
  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
  import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
--import org.slf4j.Logger;
--import org.slf4j.LoggerFactory;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
  
  /**
   * Abstract line based input format that reuses the machinery from
@@@ -41,8 -41,8 +41,8 @@@
   * @param <TValue>
   *            Value type
   */
--public abstract class AbstractNLineFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
--    
++public abstract class AbstractNLineFileInputFormat<TKey, TValue> extends FileInputFormat<TKey, TValue> {
++    
      private static final Logger LOGGER = LoggerFactory.getLogger(AbstractNLineFileInputFormat.class);
  
      /**
@@@ -51,17 -51,17 +51,17 @@@
       * 
       * @see FileInputFormat#getSplits(JobContext)
       */
--    public final List<InputSplit> getSplits(JobContext job) throws IOException {
--        boolean debug = LOGGER.isDebugEnabled();
--        if (debug && FileInputFormat.getInputDirRecursive(job)) {
--            LOGGER.debug("Recursive searching for input data is enabled");
--        }
++    public final List<InputSplit> getSplits(JobContext job) throws IOException {
++        boolean debug = LOGGER.isDebugEnabled();
++        if (debug && FileInputFormat.getInputDirRecursive(job)) {
++            LOGGER.debug("Recursive searching for input data is enabled");
++        }
          
          List<InputSplit> splits = new ArrayList<InputSplit>();
          int numLinesPerSplit = NLineInputFormat.getNumLinesPerSplit(job);
--        for (FileStatus status : listStatus(job)) {
--            if (debug) {
--                LOGGER.debug("Determining how to split input file/directory {}", status.getPath());
++        for (FileStatus status : listStatus(job)) {
++            if (debug) {
++                LOGGER.debug("Determining how to split input file/directory {}", status.getPath());
              }
              splits.addAll(NLineInputFormat.getSplitsForFile(status, job.getConfiguration(), numLinesPerSplit));
          }
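
Since getSplits above sizes its splits via NLineInputFormat.getNumLinesPerSplit(job), the per-split line count can presumably be tuned through the matching NLineInputFormat helper; a short sketch under that assumption (the class name and the value 50000 are illustrative):

    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;

    public class LinesPerSplitSketch {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance();

            // getSplits(JobContext) above reads this value via
            // NLineInputFormat.getNumLinesPerSplit(job), so each split
            // covers at most this many lines of every input file.
            NLineInputFormat.setNumLinesPerSplit(job, 50000);
        }
    }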

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
index e561cdb,e561cdb..01d78d5
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/AbstractWholeFileInputFormat.java
@@@ -1,21 -1,21 +1,21 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.input;
  
  import org.apache.hadoop.fs.Path;

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
index 7136472,7136472..255fc8e
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/QuadsInputFormat.java
@@@ -1,29 -1,29 +1,29 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.input;
  
  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.mapreduce.InputSplit;
  import org.apache.hadoop.mapreduce.RecordReader;
  import org.apache.hadoop.mapreduce.TaskAttemptContext;
--import org.apache.jena.hadoop.rdf.io.input.readers.QuadsReader;
--import org.apache.jena.hadoop.rdf.types.QuadWritable;
++import org.apache.jena.hadoop.rdf.io.input.readers.QuadsReader;
++import org.apache.jena.hadoop.rdf.types.QuadWritable;
  
  
  /**

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
index 62144f3,62144f3..4ba5ff1
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesInputFormat.java
@@@ -1,39 -1,39 +1,39 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.jena.hadoop.rdf.io.input;
--
--import org.apache.hadoop.io.LongWritable;
--import org.apache.hadoop.mapreduce.InputSplit;
--import org.apache.hadoop.mapreduce.RecordReader;
--import org.apache.hadoop.mapreduce.TaskAttemptContext;
--import org.apache.jena.hadoop.rdf.io.input.readers.TriplesReader;
--import org.apache.jena.hadoop.rdf.types.TripleWritable;
--
--/**
-- * RDF input format that can handle any RDF triples format that ARQ supports,
-- * selecting the format to use for each file based upon the file extension.
-- */
--public class TriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
--
--    @Override
--    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
--        return new TriplesReader();
--    }
--
--}
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.jena.hadoop.rdf.io.input;
++
++import org.apache.hadoop.io.LongWritable;
++import org.apache.hadoop.mapreduce.InputSplit;
++import org.apache.hadoop.mapreduce.RecordReader;
++import org.apache.hadoop.mapreduce.TaskAttemptContext;
++import org.apache.jena.hadoop.rdf.io.input.readers.TriplesReader;
++import org.apache.jena.hadoop.rdf.types.TripleWritable;
++
++/**
++ * RDF input format that can handle any RDF triples format that ARQ supports,
++ * selecting the format to use for each file based upon the file extension.
++ */
++public class TriplesInputFormat extends AbstractWholeFileInputFormat<LongWritable, TripleWritable> {
++
++    @Override
++    public RecordReader<LongWritable, TripleWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
++        return new TriplesReader();
++    }
++
++}
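
A rough sketch of wiring this format into a job follows; only TriplesInputFormat, LongWritable and TripleWritable come from the classes above, while the job name, input path and the commented-out mapper are placeholders:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.jena.hadoop.rdf.io.input.TriplesInputFormat;
    import org.apache.jena.hadoop.rdf.types.TripleWritable;

    public class TriplesJobSketch {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance();
            job.setJobName("triples-job-sketch");

            // Keys are positions in the input, values are the parsed triples;
            // the parser for each file is chosen from its file extension.
            job.setInputFormatClass(TriplesInputFormat.class);
            job.setMapOutputKeyClass(LongWritable.class);
            job.setMapOutputValueClass(TripleWritable.class);

            // job.setMapperClass(MyTripleMapper.class); // MyTripleMapper is a hypothetical user-supplied mapper

            FileInputFormat.addInputPath(job, new Path("/data/rdf"));
        }
    }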

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
index 50da740,50da740..4ef8656
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/TriplesOrQuadsInputFormat.java
@@@ -1,29 -1,29 +1,29 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.input;
  
  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.mapreduce.InputSplit;
  import org.apache.hadoop.mapreduce.RecordReader;
  import org.apache.hadoop.mapreduce.TaskAttemptContext;
--import org.apache.jena.hadoop.rdf.io.input.readers.TriplesOrQuadsReader;
--import org.apache.jena.hadoop.rdf.types.QuadWritable;
++import org.apache.jena.hadoop.rdf.io.input.readers.TriplesOrQuadsReader;
++import org.apache.jena.hadoop.rdf.types.QuadWritable;
  
  
  /**

http://git-wip-us.apache.org/repos/asf/jena/blob/4b5cd267/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
----------------------------------------------------------------------
diff --cc jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
index 801f762,801f762..296e4c9
--- a/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
+++ b/jena-elephas/jena-elephas-io/src/main/java/org/apache/jena/hadoop/rdf/io/input/nquads/BlockedNQuadsInputFormat.java
@@@ -1,30 -1,30 +1,30 @@@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one
-- * or more contributor license agreements.  See the NOTICE file
-- * distributed with this work for additional information
-- * regarding copyright ownership.  The ASF licenses this file
-- * to you under the Apache License, Version 2.0 (the
-- * "License"); you may not use this file except in compliance
-- * with the License.  You may obtain a copy of the License at
-- * 
-- *     http://www.apache.org/licenses/LICENSE-2.0
-- *     
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * 
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *     
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
  package org.apache.jena.hadoop.rdf.io.input.nquads;
  
  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.mapreduce.InputSplit;
  import org.apache.hadoop.mapreduce.RecordReader;
  import org.apache.hadoop.mapreduce.TaskAttemptContext;
--import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
--import org.apache.jena.hadoop.rdf.io.input.readers.nquads.BlockedNQuadsReader;
--import org.apache.jena.hadoop.rdf.types.QuadWritable;
++import org.apache.jena.hadoop.rdf.io.input.AbstractNLineFileInputFormat;
++import org.apache.jena.hadoop.rdf.io.input.readers.nquads.BlockedNQuadsReader;
++import org.apache.jena.hadoop.rdf.types.QuadWritable;
  
  
  /**