jena-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rve...@apache.org
Subject [07/50] [abbrv] Move Jena Hadoop RDF Tools into new location for merging into main git repository
Date Mon, 20 Oct 2014 14:47:29 GMT
http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/AbstractCompressedRdfXmlInputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/AbstractCompressedRdfXmlInputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/AbstractCompressedRdfXmlInputFormatTests.java
new file mode 100644
index 0000000..36b935b
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/AbstractCompressedRdfXmlInputFormatTests.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.rdfxml;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.jena.hadoop.rdf.io.input.RdfXmlInputFormat;
+import org.apache.jena.hadoop.rdf.io.input.compressed.AbstractCompressedWholeFileTripleInputFormatTests;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Abstract tests for compressed RDF/XML whole file input.
+ * <p>
+ * Concrete subclasses supply the file extension and compression codec via the
+ * constructor; this class wires those into the generic compressed whole file
+ * triple input format test machinery.
+ * </p>
+ */
+public abstract class AbstractCompressedRdfXmlInputFormatTests extends
+        AbstractCompressedWholeFileTripleInputFormatTests {
+
+    // Set once in the constructor and never reassigned, so marked final
+    private final String ext;
+    private final CompressionCodec codec;
+
+    /**
+     * Creates new tests
+     * 
+     * @param ext
+     *            File extension
+     * @param codec
+     *            Compression codec
+     */
+    public AbstractCompressedRdfXmlInputFormatTests(String ext, CompressionCodec codec) {
+        this.ext = ext;
+        this.codec = codec;
+    }
+
+    @Override
+    protected final String getFileExtension() {
+        return this.ext;
+    }
+
+    @Override
+    protected final CompressionCodec getCompressionCodec() {
+        return this.codec;
+    }
+
+    @Override
+    protected final Lang getRdfLanguage() {
+        // Input is always parsed as RDF/XML regardless of the codec in use
+        return Lang.RDFXML;
+    }
+
+    @Override
+    protected final InputFormat<LongWritable, TripleWritable> getInputFormat() {
+        return new RdfXmlInputFormat();
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/BZippedRdfXmlInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/BZippedRdfXmlInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/BZippedRdfXmlInputTest.java
new file mode 100644
index 0000000..30a6c39
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/BZippedRdfXmlInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.rdfxml;
+
+import org.apache.hadoop.io.compress.BZip2Codec;
+
+/**
+ * Tests RDF/XML input that has been compressed with BZip2.
+ */
+public class BZippedRdfXmlInputTest extends AbstractCompressedRdfXmlInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .rdf.bz2} extension and the
+     * {@link BZip2Codec}
+     */
+    public BZippedRdfXmlInputTest() {
+        super(".rdf.bz2", new BZip2Codec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/DeflatedRdfXmlInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/DeflatedRdfXmlInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/DeflatedRdfXmlInputTest.java
new file mode 100644
index 0000000..a3d747c
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/DeflatedRdfXmlInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.rdfxml;
+
+import org.apache.hadoop.io.compress.DefaultCodec;
+
+/**
+ * Tests RDF/XML input that has been compressed with Deflate.
+ */
+public class DeflatedRdfXmlInputTest extends AbstractCompressedRdfXmlInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .rdf.deflate} extension and the
+     * {@link DefaultCodec}
+     */
+    public DeflatedRdfXmlInputTest() {
+        super(".rdf.deflate", new DefaultCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/GZippedRdfXmlInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/GZippedRdfXmlInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/GZippedRdfXmlInputTest.java
new file mode 100644
index 0000000..748785c
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/rdfxml/GZippedRdfXmlInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.rdfxml;
+
+import org.apache.hadoop.io.compress.GzipCodec;
+
+/**
+ * Tests RDF/XML input that has been compressed with GZip.
+ */
+public class GZippedRdfXmlInputTest extends AbstractCompressedRdfXmlInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .rdf.gz} extension and the
+     * {@link GzipCodec}
+     */
+    public GZippedRdfXmlInputTest() {
+        super(".rdf.gz", new GzipCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/AbstractCompressedTriGInputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/AbstractCompressedTriGInputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/AbstractCompressedTriGInputFormatTests.java
new file mode 100644
index 0000000..9b92502
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/AbstractCompressedTriGInputFormatTests.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.trig;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.jena.hadoop.rdf.io.input.TriGInputFormat;
+import org.apache.jena.hadoop.rdf.io.input.compressed.AbstractCompressedWholeFileQuadInputFormatTests;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Abstract tests for compressed TriG input (the previous Javadoc said Turtle,
+ * a copy-paste left-over).
+ * <p>
+ * Concrete subclasses supply the file extension and compression codec via the
+ * constructor; this class wires those into the generic compressed whole file
+ * quad input format test machinery.
+ * </p>
+ */
+public abstract class AbstractCompressedTriGInputFormatTests extends
+        AbstractCompressedWholeFileQuadInputFormatTests {
+
+    // Set once in the constructor and never reassigned, so marked final
+    private final String ext;
+    private final CompressionCodec codec;
+
+    /**
+     * Creates new tests
+     * 
+     * @param ext
+     *            File extension
+     * @param codec
+     *            Compression codec
+     */
+    public AbstractCompressedTriGInputFormatTests(String ext, CompressionCodec codec) {
+        this.ext = ext;
+        this.codec = codec;
+    }
+
+    @Override
+    protected final String getFileExtension() {
+        return this.ext;
+    }
+
+    @Override
+    protected final CompressionCodec getCompressionCodec() {
+        return this.codec;
+    }
+
+    @Override
+    protected final Lang getRdfLanguage() {
+        // Input is always parsed as TriG regardless of the codec in use
+        return Lang.TRIG;
+    }
+
+    @Override
+    protected final InputFormat<LongWritable, QuadWritable> getInputFormat() {
+        return new TriGInputFormat();
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/BZippedTriGInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/BZippedTriGInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/BZippedTriGInputTest.java
new file mode 100644
index 0000000..b2b3c33
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/BZippedTriGInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.trig;
+
+import org.apache.hadoop.io.compress.BZip2Codec;
+
+/**
+ * Tests TriG input that has been compressed with BZip2.
+ */
+public class BZippedTriGInputTest extends AbstractCompressedTriGInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .trig.bz2} extension and the
+     * {@link BZip2Codec}
+     */
+    public BZippedTriGInputTest() {
+        super(".trig.bz2", new BZip2Codec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/DeflatedTriGInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/DeflatedTriGInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/DeflatedTriGInputTest.java
new file mode 100644
index 0000000..c9579a9
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/DeflatedTriGInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.trig;
+
+import org.apache.hadoop.io.compress.DefaultCodec;
+
+/**
+ * Tests TriG input that has been compressed with Deflate.
+ */
+public class DeflatedTriGInputTest extends AbstractCompressedTriGInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .trig.deflate} extension and the
+     * {@link DefaultCodec}
+     */
+    public DeflatedTriGInputTest() {
+        super(".trig.deflate", new DefaultCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/GZippedTriGInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/GZippedTriGInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/GZippedTriGInputTest.java
new file mode 100644
index 0000000..c3e4106
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/trig/GZippedTriGInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.trig;
+
+import org.apache.hadoop.io.compress.GzipCodec;
+
+/**
+ * Tests TriG input that has been compressed with GZip.
+ */
+public class GZippedTriGInputTest extends AbstractCompressedTriGInputFormatTests {
+
+    /**
+     * Creates new tests using the {@code .trig.gz} extension and the
+     * {@link GzipCodec}
+     */
+    public GZippedTriGInputTest() {
+        super(".trig.gz", new GzipCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/AbstractCompressedTurtleInputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/AbstractCompressedTurtleInputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/AbstractCompressedTurtleInputFormatTests.java
new file mode 100644
index 0000000..c6be25c
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/AbstractCompressedTurtleInputFormatTests.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.turtle;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.jena.hadoop.rdf.io.input.TurtleInputFormat;
+import org.apache.jena.hadoop.rdf.io.input.compressed.AbstractCompressedWholeFileTripleInputFormatTests;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Abstract tests for compressed Turtle input.
+ * <p>
+ * Concrete subclasses supply the file extension and compression codec via the
+ * constructor; this class wires those into the generic compressed whole file
+ * triple input format test machinery.
+ * </p>
+ */
+public abstract class AbstractCompressedTurtleInputFormatTests extends
+        AbstractCompressedWholeFileTripleInputFormatTests {
+
+    // Set once in the constructor and never reassigned, so marked final
+    private final String ext;
+    private final CompressionCodec codec;
+
+    /**
+     * Creates new tests
+     * 
+     * @param ext
+     *            File extension
+     * @param codec
+     *            Compression codec
+     */
+    public AbstractCompressedTurtleInputFormatTests(String ext, CompressionCodec codec) {
+        this.ext = ext;
+        this.codec = codec;
+    }
+
+    @Override
+    protected final String getFileExtension() {
+        return this.ext;
+    }
+
+    @Override
+    protected final CompressionCodec getCompressionCodec() {
+        return this.codec;
+    }
+
+    @Override
+    protected final Lang getRdfLanguage() {
+        // Input is always parsed as Turtle regardless of the codec in use
+        return Lang.TURTLE;
+    }
+
+    @Override
+    protected final InputFormat<LongWritable, TripleWritable> getInputFormat() {
+        return new TurtleInputFormat();
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/BZippedTurtleInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/BZippedTurtleInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/BZippedTurtleInputTest.java
new file mode 100644
index 0000000..724b847
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/BZippedTurtleInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.turtle;
+
+import org.apache.hadoop.io.compress.BZip2Codec;
+
+/**
+ * Tests for BZipped Turtle input (the previous Javadoc said NTriples, a
+ * copy-paste left-over).
+ */
+public class BZippedTurtleInputTest extends AbstractCompressedTurtleInputFormatTests {
+
+    /**
+     * Creates new tests
+     */
+    public BZippedTurtleInputTest() {
+        // Use the Turtle extension (.ttl) rather than the NTriples extension
+        // (.nt) copy-pasted here previously; codec detection keys off the
+        // trailing .bz2 suffix so compression behaviour is unchanged
+        super(".ttl.bz2", new BZip2Codec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/DeflatedTurtleInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/DeflatedTurtleInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/DeflatedTurtleInputTest.java
new file mode 100644
index 0000000..eb5ee03
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/DeflatedTurtleInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.turtle;
+
+import org.apache.hadoop.io.compress.DefaultCodec;
+
+/**
+ * Tests for Deflated Turtle input (the previous Javadoc said NTriples, a
+ * copy-paste left-over).
+ */
+public class DeflatedTurtleInputTest extends AbstractCompressedTurtleInputFormatTests {
+
+    /**
+     * Creates new tests
+     */
+    public DeflatedTurtleInputTest() {
+        // Use the Turtle extension (.ttl) rather than the NTriples extension
+        // (.nt) copy-pasted here previously; codec detection keys off the
+        // trailing .deflate suffix so compression behaviour is unchanged
+        super(".ttl.deflate", new DefaultCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/GZippedTurtleInputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/GZippedTurtleInputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/GZippedTurtleInputTest.java
new file mode 100644
index 0000000..817805c
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/compressed/turtle/GZippedTurtleInputTest.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.compressed.turtle;
+
+import org.apache.hadoop.io.compress.GzipCodec;
+
+/**
+ * Tests for GZipped Turtle input (the previous Javadoc said NTriples, a
+ * copy-paste left-over).
+ */
+public class GZippedTurtleInputTest extends AbstractCompressedTurtleInputFormatTests {
+
+    /**
+     * Creates new tests
+     */
+    public GZippedTurtleInputTest() {
+        // Use the Turtle extension (.ttl) rather than the NTriples extension
+        // (.nt) copy-pasted here previously; codec detection keys off the
+        // trailing .gz suffix so compression behaviour is unchanged
+        super(".ttl.gz", new GzipCodec());
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/AbstractTrackableInputStreamTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/AbstractTrackableInputStreamTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/AbstractTrackableInputStreamTests.java
new file mode 100644
index 0000000..9532d56
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/AbstractTrackableInputStreamTests.java
@@ -0,0 +1,701 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Abstract tests for {@link TrackableInputStream} implementations
+ * 
+ * 
+ * 
+ */
+public abstract class AbstractTrackableInputStreamTests {
+
+    protected static final int KILO = 1024;
+    protected static final int BYTES_PER_KB = KILO;
+    protected static final int BYTES_PER_MB = BYTES_PER_KB * KILO;
+
+    /**
+     * Gets the instance to test using the given input as the stream to track
+     * 
+     * @param input
+     *            Input Stream
+     * @return Trackable Input Stream
+     */
+    protected abstract TrackableInputStream getInstance(InputStream input);
+
+    /**
+     * Generates an input stream containing the given number of bytes
+     * 
+     * @param length
+     *            Number of bytes
+     * @return Input stream
+     */
+    protected final InputStream generateData(int length) {
+        ByteArrayOutputStream output = new ByteArrayOutputStream(length);
+        byte b = (byte) 'b';
+        for (int i = 0; i < length; i++) {
+            output.write(b);
+        }
+        return new ByteArrayInputStream(output.toByteArray());
+    }
+
+    protected final void testSingleByteRead(int length) throws IOException {
+        InputStream input = this.generateData(length);
+        TrackableInputStream trackable = this.getInstance(input);
+        long count = 0;
+        while (trackable.read() >= 0) {
+            count++;
+        }
+        Assert.assertEquals(length, count);
+        Assert.assertEquals(length, trackable.getBytesRead());
+        trackable.close();
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_single_01() throws IOException {
+        this.testSingleByteRead(0);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_single_02() throws IOException {
+        this.testSingleByteRead(100);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_single_03() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_single_04() throws IOException {
+        // 1 MB
+        this.testSingleByteRead(BYTES_PER_MB);
+    }
+
+    protected final void testMultiByteRead(int length, int bufferSize) throws IOException {
+        if (bufferSize < 1)
+            throw new IllegalArgumentException("bufferSize must be >= 1");
+        InputStream input = this.generateData(length);
+        TrackableInputStream trackable = this.getInstance(input);
+        long count = 0;
+        byte[] buffer = new byte[bufferSize];
+        long read;
+        do {
+            read = trackable.read(buffer);
+            if (read > 0)
+                count += read;
+        } while (read >= 0);
+        Assert.assertEquals(length, count);
+        Assert.assertEquals(length, trackable.getBytesRead());
+        trackable.close();
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_01() throws IOException {
+        this.testMultiByteRead(0, 1);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_02() throws IOException {
+        this.testMultiByteRead(0, 16);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_03() throws IOException {
+        this.testMultiByteRead(0, BYTES_PER_KB);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_04() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, 1);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_05() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, 16);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_06() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_07() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, 1);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_08() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, 16);
+    }
+
+    /**
+     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_09() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB);
+    }
+
+    protected final void testMultiByteRead(int length, int bufferSize, int readSize) throws IOException {
+        if (bufferSize < 1)
+            throw new IllegalArgumentException("bufferSize must be >= 1");
+        if (readSize < 1 || readSize > bufferSize)
+            throw new IllegalArgumentException("readSize must be >= 1 and <= bufferSize");
+        InputStream input = this.generateData(length);
+        TrackableInputStream trackable = this.getInstance(input);
+        long count = 0;
+        byte[] buffer = new byte[bufferSize];
+        long read;
+        do {
+            read = trackable.read(buffer, 0, readSize);
+            if (read > 0)
+                count += read;
+        } while (read >= 0);
+        Assert.assertEquals(length, count);
+        Assert.assertEquals(length, trackable.getBytesRead());
+        trackable.close();
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_01() throws IOException {
+        this.testMultiByteRead(0, 1, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_02() throws IOException {
+        this.testMultiByteRead(0, 16, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_03() throws IOException {
+        this.testMultiByteRead(0, 16, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_04() throws IOException {
+        this.testMultiByteRead(0, BYTES_PER_KB, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_05() throws IOException {
+        this.testMultiByteRead(0, BYTES_PER_KB, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_06() throws IOException {
+        this.testMultiByteRead(0, BYTES_PER_KB, BYTES_PER_KB);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_07() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, 1, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_08() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, 16, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_09() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, 16, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_10() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_11() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_12() throws IOException {
+        // 1KB
+        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, BYTES_PER_KB);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_13() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, 1, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_14() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, 16, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_15() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, 16, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_16() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, 1);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_17() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, 16);
+    }
+
+    /**
+     * Test reading multiple bytes while reading less than the buffer size bytes
+     * i.e. calling {@link InputStream#read(byte[], int, int)}
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_read_multiple_partial_18() throws IOException {
+        // 1MB
+        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, BYTES_PER_KB);
+    }
+
+    protected final void testSkip(int length, long skipSize) throws IOException {
+        if (skipSize < 1)
+            throw new IllegalArgumentException("skipSize must be >= 1");
+        InputStream input = this.generateData(length);
+        TrackableInputStream trackable = this.getInstance(input);
+        long count = 0;
+        long skipped;
+        do {
+            skipped = trackable.skip(skipSize);
+            if (skipped > 0)
+                count += skipped;
+        } while (skipped > 0);
+        Assert.assertEquals(length, count);
+        Assert.assertEquals(length, trackable.getBytesRead());
+        trackable.close();
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_01() throws IOException {
+        this.testSkip(0, 1);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_02() throws IOException {
+        this.testSkip(100, 1);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_03() throws IOException {
+        this.testSkip(100, 16);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_04() throws IOException {
+        this.testSkip(100, BYTES_PER_KB);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_05() throws IOException {
+        // 1KB
+        this.testSkip(BYTES_PER_KB, 1);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_06() throws IOException {
+        // 1KB
+        this.testSkip(BYTES_PER_KB, 16);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_07() throws IOException {
+        // 1KB
+        this.testSkip(BYTES_PER_KB, BYTES_PER_KB);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_08() throws IOException {
+        // 1KB
+        this.testSkip(BYTES_PER_KB, BYTES_PER_MB);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_09() throws IOException {
+        // 1 MB
+        this.testSkip(BYTES_PER_MB, 1);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_10() throws IOException {
+        // 1 MB
+        this.testSkip(BYTES_PER_MB, 16);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_11() throws IOException {
+        // 1 MB
+        this.testSkip(BYTES_PER_MB, BYTES_PER_KB);
+    }
+
+    /**
+     * Test skipping
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_skip_single_12() throws IOException {
+        // 1 MB
+        this.testSkip(BYTES_PER_MB, BYTES_PER_MB);
+    }
+
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_01() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(-1, trackable.read());
+    }
+    
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_02() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(0, trackable.read(new byte[0]));
+    }
+    
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_03() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(-1, trackable.read(new byte[1]));
+    }
+    
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_04() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(0, trackable.read(new byte[16], 0, 0));
+    }
+    
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_05() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(-1, trackable.read(new byte[16], 0, 8));
+    }
+    
+    /**
+     * Tests behaviour after closing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_06() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(0, trackable.skip(0));
+    }
+    
+    /**
+     * Tests behaviour after closing: skip() should report zero bytes
+     * skipped rather than throwing
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void trackable_input_post_close_07() throws IOException {
+        InputStream input = this.generateData(0);
+        TrackableInputStream trackable = this.getInstance(input);
+        trackable.close();
+        Assert.assertEquals(0, trackable.skip(1));
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStreamTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStreamTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStreamTest.java
new file mode 100644
index 0000000..f8819bc
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/BlockInputStreamTest.java
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.jena.hadoop.rdf.io.input.util.BlockInputStream;
+import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for the {@link BlockInputStream}
+ * 
+ * 
+ * 
+ */
+public class BlockInputStreamTest extends AbstractTrackableInputStreamTests {
+
+    @Override
+    protected TrackableInputStream getInstance(InputStream input) {
+        return new BlockInputStream(input, Long.MAX_VALUE);
+    }
+
+    /**
+     * Gets an instance of a block input stream
+     * 
+     * @param input
+     *            Underlying input stream
+     * @param limit
+     *            Limit on bytes to read
+     * @return Block input stream
+     */
+    protected BlockInputStream getInstance(InputStream input, long limit) {
+        return new BlockInputStream(input, limit);
+    }
+    
+    protected final void testSingleByteRead(int length, long limit) throws IOException {
+        InputStream input = this.generateData(length);
+        TrackableInputStream trackable = this.getInstance(input, limit);
+        long count = 0;
+        while (trackable.read() >= 0) {
+            count++;
+        }
+        int expected = (int) Math.min(length, limit);
+        Assert.assertEquals(expected, count);
+        Assert.assertEquals(expected, trackable.getBytesRead());
+        trackable.close();
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_01() throws IOException {
+        this.testSingleByteRead(0, 0);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_02() throws IOException {
+        this.testSingleByteRead(100, 0);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_03() throws IOException {
+        this.testSingleByteRead(100, 50);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_04() throws IOException {
+        this.testSingleByteRead(100, 100);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_05() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, 1);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_06() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, 100);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_07() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, BYTES_PER_KB / 2);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_08() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, BYTES_PER_KB);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_09() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, BYTES_PER_MB);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_10() throws IOException {
+        // 1KB
+        this.testSingleByteRead(BYTES_PER_KB, BYTES_PER_MB * 10);
+    }
+
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_11() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, 1);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_12() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, 100);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_13() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, BYTES_PER_KB);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_14() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, BYTES_PER_MB / 2);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_15() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, BYTES_PER_MB);
+    }
+    
+    /**
+     * Test reading byte by byte
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void block_input_read_single_16() throws IOException {
+        // 1MB
+        this.testSingleByteRead(BYTES_PER_MB, BYTES_PER_MB * 10);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStreamTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStreamTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStreamTest.java
new file mode 100644
index 0000000..f453aa5
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/input/util/TrackedInputStreamTest.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.input.util;
+
+import java.io.InputStream;
+
+import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
+import org.apache.jena.hadoop.rdf.io.input.util.TrackedInputStream;
+
+/**
+ * Tests for the {@link TrackedInputStream}
+ * 
+ * 
+ * 
+ */
+public class TrackedInputStreamTest extends AbstractTrackableInputStreamTests {
+
+    @Override
+    protected TrackableInputStream getInstance(InputStream input) {
+        return new TrackedInputStream(input);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormatTests.java
new file mode 100644
index 0000000..31c3a67
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractNodeTupleOutputFormatTests.java
@@ -0,0 +1,252 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
+import org.apache.jena.riot.Lang;
+import org.apache.jena.riot.RDFDataMgr;
+import org.apache.jena.riot.lang.StreamRDFCounting;
+import org.apache.jena.riot.system.StreamRDFLib;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Abstract node tuple output format tests
+ * <p>
+ * Each test sets up a fake map-reduce job, writes a known number of generated
+ * tuples through the output format under test and then parses the produced
+ * file back in with RIOT to check that the expected number of tuples was
+ * actually written.
+ * </p>
+ * 
+ * @param <TValue>
+ *            Tuple type
+ * @param <T>
+ *            Writable tuple type
+ * 
+ */
+public abstract class AbstractNodeTupleOutputFormatTests<TValue, T extends AbstractNodeTupleWritable<TValue>> {
+
+    // Deliberately unused at present, retained for ad-hoc debugging
+    @SuppressWarnings("unused")
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractNodeTupleOutputFormatTests.class);
+
+    // Tuple counts used by the basic output tests below
+    protected static final int EMPTY_SIZE = 0, SMALL_SIZE = 100, LARGE_SIZE = 10000, VERY_LARGE_SIZE = 100000;
+
+    /**
+     * Temporary folder for the tests
+     */
+    @Rule
+    public TemporaryFolder folder = new TemporaryFolder();
+
+    /**
+     * Prepares a fresh configuration; subclasses may override to inject
+     * format specific settings
+     * 
+     * @return Configuration
+     */
+    protected Configuration prepareConfiguration() {
+        Configuration config = new Configuration(true);
+        // Nothing else to do
+        return config;
+    }
+
+    /**
+     * Gets the extra file extension to add to the filenames
+     * 
+     * @return File extension
+     */
+    protected abstract String getFileExtension();
+
+    /**
+     * Generates tuples to be output for testing
+     * 
+     * @param num
+     *            Number of tuples to generate
+     * @return Iterator of tuples
+     */
+    protected abstract Iterator<T> generateTuples(int num);
+
+    /**
+     * Counts tuples in the output file by parsing it back in with RIOT using
+     * the language reported by {@link #getRdfLanguage()}
+     * 
+     * @param f
+     *            Output file
+     * @return Tuple count
+     */
+    protected final long countTuples(File f) {
+        StreamRDFCounting counter = StreamRDFLib.count();
+        RDFDataMgr.parse(counter, f.getAbsolutePath(), this.getRdfLanguage(), null);
+        return counter.count();
+    }
+
+    /**
+     * Checks that tuples are as expected
+     * 
+     * @param f
+     *            File
+     * @param expected
+     *            Expected number of tuples
+     */
+    protected void checkTuples(File f, long expected) {
+        Assert.assertEquals(expected, this.countTuples(f));
+    }
+
+    /**
+     * Gets the RDF language of the produced output which is used to parse back
+     * in the output to validate the correct amount of output was produced
+     * 
+     * @return RDF language
+     */
+    protected abstract Lang getRdfLanguage();
+
+    /**
+     * Gets the output format to test
+     * 
+     * @return Output format
+     */
+    protected abstract OutputFormat<NullWritable, T> getOutputFormat();
+
+    /**
+     * Adds an output path to the job configuration, qualified against the
+     * local file system
+     * 
+     * @param f
+     *            File
+     * @param config
+     *            Configuration
+     * @param job
+     *            Job
+     * @throws IOException
+     */
+    protected void addOutputPath(File f, Configuration config, Job job) throws IOException {
+        FileSystem fs = FileSystem.getLocal(config);
+        Path outputPath = fs.makeQualified(new Path(f.getAbsolutePath()));
+        FileOutputFormat.setOutputPath(job, outputPath);
+    }
+    
+    /**
+     * Finds the first actual output file under the job's output path,
+     * skipping marker files whose names start with an underscore (e.g.
+     * _SUCCESS)
+     * 
+     * @param dir
+     *            Directory (NOTE(review): currently unused - the search is
+     *            driven entirely by the output path recorded in the context)
+     * @param context
+     *            Job context whose configured output path is searched
+     * @return First output file found, or null if there is none
+     * @throws FileNotFoundException
+     * @throws IOException
+     */
+    protected File findOutputFile(File dir, JobContext context) throws FileNotFoundException, IOException {
+        Path outputPath = FileOutputFormat.getOutputPath(context);
+        RemoteIterator<LocatedFileStatus> files = outputPath.getFileSystem(context.getConfiguration()).listFiles(outputPath, true);
+        while (files.hasNext()) {
+            LocatedFileStatus status = files.next();
+            if (status.isFile() && !status.getPath().getName().startsWith("_")) {
+                return new File(status.getPath().toUri());
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Tests output by writing {@code num} generated tuples through the output
+     * format and then re-parsing the produced file to verify the count
+     * 
+     * @param f
+     *            File to output to
+     * @param num
+     *            Number of tuples to output
+     * @throws IOException
+     * @throws InterruptedException 
+     */
+    protected final void testOutput(File f, int num) throws IOException, InterruptedException {
+        // Prepare configuration
+        Configuration config = this.prepareConfiguration();
+
+        // Set up fake job
+        OutputFormat<NullWritable, T> outputFormat = this.getOutputFormat();
+        Job job = Job.getInstance(config);
+        job.setOutputFormatClass(outputFormat.getClass());
+        this.addOutputPath(f, job.getConfiguration(), job);
+        JobContext context = new JobContextImpl(job.getConfiguration(), job.getJobID());
+        Assert.assertNotNull(FileOutputFormat.getOutputPath(context));
+        
+        // Output the data
+        // NOTE(review): if write() throws, the writer is never closed; that is
+        // tolerable in a test helper but would be a resource leak elsewhere
+        TaskAttemptID id = new TaskAttemptID("outputTest", 1, TaskType.MAP, 1, 1);
+        TaskAttemptContext taskContext = new TaskAttemptContextImpl(job.getConfiguration(), id);
+        RecordWriter<NullWritable, T> writer = outputFormat.getRecordWriter(taskContext);
+        Iterator<T> tuples = this.generateTuples(num);
+        while (tuples.hasNext()) {
+            writer.write(NullWritable.get(), tuples.next());
+        }
+        writer.close(taskContext);
+        
+        // Check output
+        File outputFile = this.findOutputFile(this.folder.getRoot(), context);
+        Assert.assertNotNull(outputFile);
+        this.checkTuples(outputFile, num);
+    }
+
+    /**
+     * Basic output tests with no tuples ({@link #EMPTY_SIZE})
+     * 
+     * @throws IOException
+     * @throws InterruptedException 
+     */
+    @Test
+    public final void output_01() throws IOException, InterruptedException {
+        this.testOutput(this.folder.getRoot(), EMPTY_SIZE);
+    }
+    
+    /**
+     * Basic output tests with {@link #SMALL_SIZE} tuples
+     * 
+     * @throws IOException
+     * @throws InterruptedException 
+     */
+    @Test
+    public final void output_02() throws IOException, InterruptedException {
+        this.testOutput(this.folder.getRoot(), SMALL_SIZE);
+    }
+    
+    /**
+     * Basic output tests with {@link #LARGE_SIZE} tuples
+     * 
+     * @throws IOException
+     * @throws InterruptedException 
+     */
+    @Test
+    public final void output_03() throws IOException, InterruptedException {
+        this.testOutput(this.folder.getRoot(), LARGE_SIZE);
+    }
+    
+    /**
+     * Basic output tests with {@link #VERY_LARGE_SIZE} tuples
+     * 
+     * @throws IOException
+     * @throws InterruptedException 
+     */
+    @Test
+    public final void output_04() throws IOException, InterruptedException {
+        this.testOutput(this.folder.getRoot(), VERY_LARGE_SIZE);
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractQuadOutputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractQuadOutputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractQuadOutputFormatTests.java
new file mode 100644
index 0000000..f1822f6
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractQuadOutputFormatTests.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Abstract tests for quad output formats; supplies the quad test data while
+ * the actual test methods are inherited from the parent class
+ */
+public abstract class AbstractQuadOutputFormatTests extends AbstractNodeTupleOutputFormatTests<Quad, QuadWritable> {
+
+    @Override
+    protected Iterator<QuadWritable> generateTuples(int num) {
+        // Every quad lives in its own graph with a distinct subject, a shared
+        // predicate and an integer-typed literal object
+        List<QuadWritable> quads = new ArrayList<QuadWritable>(num);
+        for (int index = 0; index < num; index++) {
+            quads.add(new QuadWritable(new Quad(
+                    NodeFactory.createURI("http://example.org/graphs/" + index),
+                    NodeFactory.createURI("http://example.org/subjects/" + index),
+                    NodeFactory.createURI("http://example.org/predicate"),
+                    NodeFactory.createLiteral(Integer.toString(index), XSDDatatype.XSDinteger))));
+        }
+        return quads.iterator();
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractTripleOutputFormatTests.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractTripleOutputFormatTests.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractTripleOutputFormatTests.java
new file mode 100644
index 0000000..90eb531
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/AbstractTripleOutputFormatTests.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Abstract tests for triple output formats; supplies the triple test data
+ * while the actual test methods are inherited from the parent class
+ */
+public abstract class AbstractTripleOutputFormatTests extends AbstractNodeTupleOutputFormatTests<Triple, TripleWritable> {
+
+    @Override
+    protected Iterator<TripleWritable> generateTuples(int num) {
+        // Every triple has a distinct subject, a shared predicate and an
+        // integer-typed literal object
+        List<TripleWritable> triples = new ArrayList<TripleWritable>(num);
+        for (int index = 0; index < num; index++) {
+            triples.add(new TripleWritable(new Triple(
+                    NodeFactory.createURI("http://example.org/subjects/" + index),
+                    NodeFactory.createURI("http://example.org/predicate"),
+                    NodeFactory.createLiteral(Integer.toString(index), XSDDatatype.XSDinteger))));
+        }
+        return triples.iterator();
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTriGOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTriGOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTriGOutputTest.java
new file mode 100644
index 0000000..924cac1
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTriGOutputTest.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
+import org.apache.jena.hadoop.rdf.io.output.BatchedTriGOutputFormat;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+import org.apache.jena.riot.Lang;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+
+/**
+ * Tests for TriG output, run against several different batch sizes
+ */
+@RunWith(Parameterized.class)
+public class BatchedTriGOutputTest extends AbstractQuadOutputFormatTests {
+
+    static long $bs1 = RdfIOConstants.DEFAULT_OUTPUT_BATCH_SIZE;
+    static long $bs2 = 1000;
+    static long $bs3 = 100;
+    static long $bs4 = 1;
+
+    /**
+     * @return Test parameters
+     */
+    @Parameters
+    public static Collection<Object[]> data() {
+        return Arrays.asList(new Object[] { $bs1 }, new Object[] { $bs2 }, new Object[] { $bs3 }, new Object[] { $bs4 });
+    }
+
+    private final long batchSize;
+
+    /**
+     * Creates new tests
+     * 
+     * @param batchSize
+     *            Batch size
+     */
+    public BatchedTriGOutputTest(long batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    @Override
+    protected Configuration prepareConfiguration() {
+        // Inject the batch size under test into the base configuration
+        Configuration trigConfig = super.prepareConfiguration();
+        trigConfig.setLong(RdfIOConstants.OUTPUT_BATCH_SIZE, this.batchSize);
+        return trigConfig;
+    }
+
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.TRIG;
+    }
+
+    @Override
+    protected String getFileExtension() {
+        return ".trig";
+    }
+
+    @Override
+    protected OutputFormat<NullWritable, QuadWritable> getOutputFormat() {
+        return new BatchedTriGOutputFormat<NullWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTurtleOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTurtleOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTurtleOutputTest.java
new file mode 100644
index 0000000..f0b8490
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/BatchedTurtleOutputTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+
+/**
+ * Tests for Turtle output, run against several different batch sizes
+ */
+@RunWith(Parameterized.class)
+public class BatchedTurtleOutputTest extends AbstractTripleOutputFormatTests {
+
+    static long $bs1 = RdfIOConstants.DEFAULT_OUTPUT_BATCH_SIZE;
+    static long $bs2 = 1000;
+    static long $bs3 = 100;
+    static long $bs4 = 1;
+
+    /**
+     * @return Test parameters
+     */
+    @Parameters
+    public static Collection<Object[]> data() {
+        return Arrays.asList(new Object[] { $bs1 }, new Object[] { $bs2 }, new Object[] { $bs3 }, new Object[] { $bs4 });
+    }
+
+    private final long batchSize;
+
+    /**
+     * Creates new tests
+     * 
+     * @param batchSize
+     *            Batch size
+     */
+    public BatchedTurtleOutputTest(long batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    @Override
+    protected Configuration prepareConfiguration() {
+        // Inject the batch size under test into the base configuration
+        Configuration turtleConfig = super.prepareConfiguration();
+        turtleConfig.setLong(RdfIOConstants.OUTPUT_BATCH_SIZE, this.batchSize);
+        return turtleConfig;
+    }
+
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.TURTLE;
+    }
+
+    @Override
+    protected String getFileExtension() {
+        return ".ttl";
+    }
+
+    @Override
+    protected OutputFormat<NullWritable, TripleWritable> getOutputFormat() {
+        return new BatchedTurtleOutputFormat<NullWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NQuadsOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NQuadsOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NQuadsOutputTest.java
new file mode 100644
index 0000000..b228715
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NQuadsOutputTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.output.NQuadsOutputFormat;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Tests for NQuads output format
+ */
+public class NQuadsOutputTest extends AbstractQuadOutputFormatTests {
+
+    /**
+     * @return NQuads language constant used to re-parse the output
+     */
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.NQUADS;
+    }
+
+    /**
+     * @return NQuads file extension
+     */
+    @Override
+    protected String getFileExtension() {
+        return ".nq";
+    }
+
+    /**
+     * @return NQuads output format instance under test
+     */
+    @Override
+    protected OutputFormat<NullWritable, QuadWritable> getOutputFormat() {
+        return new NQuadsOutputFormat<NullWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NTriplesOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NTriplesOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NTriplesOutputTest.java
new file mode 100644
index 0000000..df1ee97
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/NTriplesOutputTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.output.NTriplesOutputFormat;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Tests for NTriples output format
+ */
+public class NTriplesOutputTest extends AbstractTripleOutputFormatTests {
+
+    /**
+     * @return NTriples language constant used to re-parse the output
+     */
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.NTRIPLES;
+    }
+
+    /**
+     * @return NTriples file extension
+     */
+    @Override
+    protected String getFileExtension() {
+        return ".nt";
+    }
+
+    /**
+     * @return NTriples output format instance under test
+     */
+    @Override
+    protected OutputFormat<NullWritable, TripleWritable> getOutputFormat() {
+        return new NTriplesOutputFormat<NullWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfJsonOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfJsonOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfJsonOutputTest.java
new file mode 100644
index 0000000..50947f2
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfJsonOutputTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.output.RdfJsonOutputFormat;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Tests for RDF/JSON output
+ */
+public class RdfJsonOutputTest extends AbstractTripleOutputFormatTests {
+
+    /**
+     * @return RDF/JSON language constant used to re-parse the output
+     */
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.RDFJSON;
+    }
+
+    /**
+     * @return RDF/JSON file extension
+     */
+    @Override
+    protected String getFileExtension() {
+        return ".rj";
+    }
+
+    /**
+     * @return RDF/JSON output format instance under test
+     */
+    @Override
+    protected OutputFormat<NullWritable, TripleWritable> getOutputFormat() {
+        return new RdfJsonOutputFormat<NullWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/05c389be/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfXmlOutputTest.java
----------------------------------------------------------------------
diff --git a/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfXmlOutputTest.java b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfXmlOutputTest.java
new file mode 100644
index 0000000..128380c
--- /dev/null
+++ b/hadoop-rdf/hadoop-rdf-io/src/test/java/org/apache/jena/hadoop/rdf/io/output/RdfXmlOutputTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.io.output;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.jena.hadoop.rdf.io.output.RdfXmlOutputFormat;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+import org.apache.jena.riot.Lang;
+
+
+/**
+ * Tests for RDF/XML output
+ */
+public class RdfXmlOutputTest extends AbstractTripleOutputFormatTests {
+
+    /**
+     * @return RDF/XML language constant used to re-parse the output
+     */
+    @Override
+    protected Lang getRdfLanguage() {
+        return Lang.RDFXML;
+    }
+
+    /**
+     * @return RDF/XML file extension
+     */
+    @Override
+    protected String getFileExtension() {
+        return ".rdf";
+    }
+
+    /**
+     * @return RDF/XML output format instance under test
+     */
+    @Override
+    protected OutputFormat<NullWritable, TripleWritable> getOutputFormat() {
+        return new RdfXmlOutputFormat<NullWritable>();
+    }
+
+}


Mime
View raw message