clerezza-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject [3/6] clerezza-rdf-core git commit: Back to git
Date Sun, 15 Mar 2015 18:51:58 GMT
http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/api/src/site/xdoc/code-standards.xml
----------------------------------------------------------------------
diff --git a/api/src/site/xdoc/code-standards.xml b/api/src/site/xdoc/code-standards.xml
new file mode 100644
index 0000000..6cc29a3
--- /dev/null
+++ b/api/src/site/xdoc/code-standards.xml
@@ -0,0 +1,187 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<document>
+
+ <properties>
+  <title>Coding Standards</title>
+  <author email="jon@latchkey.com">Jon S. Stevens</author>
+  <author email="jvanzyl@apache.org">Jason van Zyl</author>
+ </properties>
+
+<body>
+
+<section name="Coding Standards">
+
+<p>
+This document describes a list of coding conventions that are required
+for code submissions to the project. By default, the coding conventions
+for most Open Source Projects should follow the existing coding conventions
+in the code that you are working on. For example, if the bracket is on
+the same line as the if statement, then you should write all your code
+to have that convention.
+</p>
+
+<p>
+<strong>If you commit code that does not follow these conventions, you
+are responsible for also fixing your own code.</strong>
+</p>
+
+<p>
+Below is a list of coding conventions that are specific to Apache Commons Net
+everything else not specificially mentioned here should follow the official
+<a href="http://java.sun.com/docs/codeconv/html/CodeConvTOC.doc.html">Sun
+Java Coding Conventions</a>.
+</p>
+
+<p>
+1. Brackets should begin and end on a new line and should exist even
+for one line statements. Examples:
+</p>
+
+<source test=""><![CDATA[
+if ( foo )
+{
+    // code here
+}
+
+try
+{
+    // code here
+}
+catch (Exception bar)
+{
+    // code here
+}
+finally
+{
+    // code here
+}
+
+while ( true )
+{
+    // code here
+}
+]]></source>
+
+<p>
+2. Though it's considered okay to include spaces inside parens, the
+preference is to not include them. Both of the following are okay:
+</p>
+
+<source test=""><![CDATA[
+if (foo)
+
+or
+
+if ( foo )
+]]></source>
+
+<p>
+3. 4 space indent. <strong>NO tabs</strong>. Period. We understand
+that many developers like to use tabs, but the fact of the matter is
+that in a distributed development environment where diffs are sent to
+the mailing lists by both developers and the version control system
+(which sends commit log messages), the use tabs makes it impossible to
+preserve legibility.
+</p>
+
+<p>
+In Emacs-speak, this translates to the following command:
+</p>
+
+<source><![CDATA[
+(setq-default tab-width 4 indent-tabs-mode nil)
+]]></source>
+
+<p>
+4. Native linefeeds (svn:eol-style native) for all .java source code and text files.
+Platform specific files should have the platform specific linefeeds.
+</p>
+
+<p>
+5. JavaDoc <strong>MUST</strong> exist on all public and protected methods.
+JavaDoc on private and default access methods and members is preferred and
+encouraged.  If your code modifications use an existing class/method/variable
+which lacks JavaDoc, it is required that you add it.  This will improve the
+project as a whole.
+</p>
+
+<p>
+6. The Apache License header <strong>MUST</strong> be placed at the top
+of each and every file.
+</p>
+
+<p>
+9. Import statements must be fully qualified for clarity.
+</p>
+
+<source><![CDATA[
+import java.util.ArrayList;
+import java.util.Hashtable;
+
+import org.apache.foo.Bar;
+import org.apache.bar.Foo;
+]]></source>
+
+<p>
+And not
+</p>
+
+<source><![CDATA[
+import java.util.*;
+import org.apache.foo.*;
+import org.apache.bar.*;
+]]></source>
+
+<hr noshade="true" size="1"/>
+
+<p>
+X/Emacs users might appreciate this in their .emacs file.
+</p>
+
+<source><![CDATA[
+(defun apache-jakarta-mode ()
+  "The Java mode specialization for Apache Jakarta projects."
+  (if (not (assoc "apache-jakarta" c-style-alist))
+      ;; Define the Apache Jakarta cc-mode style.
+      (c-add-style "apache-jakarta" '("java" (indent-tabs-mode . nil))))
+
+  (c-set-style "apache-jakarta")
+  (c-set-offset 'substatement-open 0 nil)
+  (setq mode-name "Apache Jakarta")
+
+  ;; Turn on syntax highlighting when X is running.
+  (if (boundp 'window-system)
+      (progn (setq font-lock-support-mode 'lazy-lock-mode)
+             (font-lock-mode t))))
+
+;; Activate Jakarta mode.
+(if (fboundp 'jde-mode)
+    (add-hook 'jde-mode-hook 'apache-jakarta-mode)
+  (add-hook 'java-mode-hook 'apache-jakarta-mode))
+]]></source>
+
+<p>
+Thanks for your cooperation.
+</p>
+
+</section>
+
+</body>
+</document>

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/api/src/site/xdoc/index.xml
----------------------------------------------------------------------
diff --git a/api/src/site/xdoc/index.xml b/api/src/site/xdoc/index.xml
new file mode 100644
index 0000000..7a7a1ec
--- /dev/null
+++ b/api/src/site/xdoc/index.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<document>
+
+    <properties>
+        <title>Overview</title>
+        <author email="dev@commons.apache.org">Apache Commons Documentation Team</author>
+    </properties>
+
+    <body>
+
+        <section name="Apache Commons RDF">
+            <p>
+                Apache Commons RDF provides an API modelling the RDF data model as defined by 
+                http://www.w3.org/TR/rdf11-concepts/
+            </p>
+        </section>
+        <section name="Features">
+            <p>
+                This Library shall
+                <ul>
+                    <li>Allow to access, modify nad monitor RDF data</li>
+                    <li>Support all concepts defined by the RDF Abstract Syntax</li>
+                    <li>Provide an easy to use API</li>
+                    <li>Support both triple stores as well as the wrapping other sources</li>
+                    <li>Allow to seamlessly use instances coming from different sources</li>
+                    <li>Integrate well with existing Java APIs</li>
+                </ul>
+            </p>
+        </section>
+
+</document>
+

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/doap_rdf.rdf
----------------------------------------------------------------------
diff --git a/doap_rdf.rdf b/doap_rdf.rdf
new file mode 100644
index 0000000..a7a5e17
--- /dev/null
+++ b/doap_rdf.rdf
@@ -0,0 +1,40 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<rdf:RDF xmlns="http://usefulinc.com/ns/doap#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:asfext="http://projects.apache.org/ns/asfext#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" xmlns:doap="http://usefulinc.com/ns/doap#" xml:lang="en">
+  <Project rdf:about="http://commons.apache.org/rdf/">
+    <name>Apache Commons RDF</name>
+    <homepage rdf:resource="http://commons.apache.org/rdf/"/>
+    <programming-language>Java</programming-language>
+    <category rdf:resource="http://projects.apache.org/category/library"/>
+    <license rdf:resource="http://usefulinc.com/doap/licenses/asl20"/>
+    <bug-database rdf:resource="https://issues.apache.org/jira/browse/RDF"/>
+    <download-page rdf:resource="http://commons.apache.org/rdf/download_net.cgi"/>
+    <asfext:pmc rdf:resource="http://commons.apache.org/"/>
+    <shortdesc xml:lang="en">Apache Commons Rdf</shortdesc>
+    <description xml:lang="en"/>
+    <repository>
+      <SVNRepository>
+        <browse rdf:resource="http://svn.apache.org/repos/asf/commons/proper/rdf/trunk"/>
+        <location rdf:resource="http://svn.apache.org/repos/asf/commons/proper/rdf"/>
+      </SVNRepository>
+    </repository>
+    <release>
+    </release>
+    <mailing-list rdf:resource="http://commons.apache.org/mail-lists.html"/>
+  </Project>
+</rdf:RDF>

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/pom.xml
----------------------------------------------------------------------
diff --git a/impl.sparql/pom.xml b/impl.sparql/pom.xml
new file mode 100644
index 0000000..8fe7681
--- /dev/null
+++ b/impl.sparql/pom.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-parent</artifactId>
+        <version>37</version>
+        <relativePath />
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>commons-rdf</groupId>
+    <artifactId>commons-rdf-impl-sparql</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <packaging>jar</packaging>
+    <name>Apache Commons RDF SPARQL backed implementation.</name>
+    <description>An implementation of the rdf commons API backed by a sparql 
+        endpoint. STATUS: Incomplete, currecnt code only supports reading 
+        graphs and does not yet support BlankNodes.</description>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <commons.encoding>UTF-8</commons.encoding>
+        <maven.compiler.source>1.7</maven.compiler.source>
+        <maven.compiler.target>1.7</maven.compiler.target>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.4</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-rdf</groupId>
+            <artifactId>commons-rdf-api</artifactId>
+            <version>0.1-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-rdf</groupId>
+            <artifactId>commons-rdf-impl-utils</artifactId>
+            <version>0.1-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.jena</groupId>
+            <artifactId>jena-fuseki</artifactId>
+            <version>1.1.1</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
new file mode 100644
index 0000000..c74aa06
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.util.Collection;
+import java.util.Objects;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.ImmutableGraph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.TripleImpl;
+import org.apache.commons.rdf.impl.utils.simple.SimpleGraph;
+
+/**
+ *
+ * @author developer
+ */
+class SparqlBNode extends BlankNode {
+    
+    private final static Iri internalBNodeId = new Iri("urn:x-internalid:fdmpoihdfw");
+    
+    final ImmutableGraph context;
+    private final int isoDistinguisher;
+
+    SparqlBNode(BlankNode node, Collection<Triple> context, int isoDistinguisher) {
+        this.isoDistinguisher = isoDistinguisher;
+        final SimpleGraph contextBuider = new SimpleGraph();
+        for (Triple triple : context) {
+            BlankNodeOrIri subject = triple.getSubject();
+            RdfTerm object = triple.getObject();
+            contextBuider.add(new TripleImpl(subject.equals(node) ? internalBNodeId : subject, 
+                    triple.getPredicate(), 
+                    object.equals(node) ? internalBNodeId : object));
+        }
+        this.context = contextBuider.getImmutableGraph();
+    }
+
+    @Override
+    public int hashCode() {
+        int hash = 7+isoDistinguisher;
+        hash = 61 * hash + Objects.hashCode(this.context);
+        return hash;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        final SparqlBNode other = (SparqlBNode) obj;
+        if (isoDistinguisher != other.isoDistinguisher) {
+            return false;
+        }
+        return Objects.equals(this.context, other.context);
+    }
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
new file mode 100644
index 0000000..ca3f619
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
@@ -0,0 +1,222 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.apache.http.HttpEntity;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
+import javax.xml.parsers.*;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Language;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.AbstractLiteral;
+import org.xml.sax.*;
+import org.xml.sax.helpers.*;
+
+/**
+ *
+ * @author developer
+ */
+public class SparqlClient {
+
+    final String endpoint;
+
+    public SparqlClient(final String endpoint) {
+        this.endpoint = endpoint;
+    }
+
+    List<Map<String, RdfTerm>> queryResultSet(final String query) throws IOException {
+        CloseableHttpClient httpclient = HttpClients.createDefault();
+        HttpPost httpPost = new HttpPost(endpoint);
+        List<NameValuePair> nvps = new ArrayList<NameValuePair>();
+        nvps.add(new BasicNameValuePair("query", query));
+        httpPost.setEntity(new UrlEncodedFormEntity(nvps));
+        CloseableHttpResponse response2 = httpclient.execute(httpPost);
+
+        try {
+            HttpEntity entity2 = response2.getEntity();
+            InputStream in = entity2.getContent();
+            SAXParserFactory spf = SAXParserFactory.newInstance();
+            spf.setNamespaceAware(true);
+            SAXParser saxParser = spf.newSAXParser();
+            XMLReader xmlReader = saxParser.getXMLReader();
+            final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler();
+            xmlReader.setContentHandler(sparqlsResultsHandler);
+            xmlReader.parse(new InputSource(in));
+            /*
+             for (int ch = in.read(); ch != -1; ch = in.read()) {
+             System.out.print((char)ch);
+             }
+             */
+            // do something useful with the response body
+            // and ensure it is fully consumed
+            EntityUtils.consume(entity2);
+            return sparqlsResultsHandler.getResults();
+        } catch (ParserConfigurationException ex) {
+            throw new RuntimeException(ex);
+        } catch (SAXException ex) {
+            throw new RuntimeException(ex);
+        } finally {
+            response2.close();
+        }
+
+    }
+
+    final public static class SparqlsResultsHandler extends DefaultHandler {
+
+        private String currentBindingName;
+        private Map<String, RdfTerm> currentResult = null;
+        private final List<Map<String, RdfTerm>> results = new ArrayList<>();
+        private boolean readingValue;
+        private String lang; //the xml:lang attribute of a literal
+        private String value;
+        private Map<String, BlankNode> bNodeMap = new HashMap<>();
+        private static final Iri XSD_STRING = new Iri("http://www.w3.org/2001/XMLSchema#string");
+
+        private RdfTerm getBNode(String value) {
+            if (!bNodeMap.containsKey(value)) {
+                bNodeMap.put(value, new BlankNode());
+            }
+            return bNodeMap.get(value);
+        }
+
+        private List<Map<String, RdfTerm>> getResults() {
+            return results;
+        }
+
+        enum BindingType {
+
+            uri, bnode, literal;
+        }
+
+        @Override
+        public void startDocument() throws SAXException {
+
+        }
+
+        @Override
+        public void startElement(String namespaceURI,
+                String localName,
+                String qName,
+                Attributes atts)
+                throws SAXException {
+            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
+                if ("result".equals(localName)) {
+                    if (currentResult != null) {
+                        throw new SAXException("unexpected tag <result>");
+                    }
+                    currentResult = new HashMap<>();
+                } else if ("binding".equals(localName)) {
+                    if (currentResult == null) {
+                        throw new SAXException("unexpected tag <binding>");
+                    }
+                    currentBindingName = atts.getValue("name");
+                } else if ("uri".equals(localName) || "bnode".equals(localName) || "literal".equals(localName)) {
+                    if (readingValue) {
+                        throw new SAXException("unexpected tag <" + localName + ">");
+                    }
+                    lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang");
+                    readingValue = true;
+                }
+            }
+
+            //System.out.println(namespaceURI);
+            //System.out.println(qName);
+        }
+
+        @Override
+        public void characters(char[] chars, int start, int length) throws SAXException {
+            if (readingValue) {
+                value = new String(chars, start, length);
+                //System.err.println(value + start + ", " + length);
+            }
+        }
+
+        @Override
+        public void endElement(String namespaceURI,
+                String localName,
+                String qName)
+                throws SAXException {
+            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
+                if ("result".equals(localName)) {
+                    results.add(currentResult);
+                    currentResult = null;
+                } else if ("binding".equals(localName)) {
+                    if (currentBindingName == null) {
+                        throw new SAXException("unexpected tag </binding>");
+                    }
+                    currentBindingName = null;
+                } else {
+                    try {
+                        BindingType b = BindingType.valueOf(localName);
+                        RdfTerm rdfTerm = null;
+                        final Language language = lang == null? null : new Language(lang);;
+                        switch (b) {
+                            case uri:
+                                rdfTerm = new Iri(value);
+                                break;
+                            case bnode:
+                                rdfTerm = getBNode(value);
+                                break;
+                            case literal:
+                                final String lf = value;
+                                rdfTerm = new AbstractLiteral() {
+
+                                    @Override
+                                    public String getLexicalForm() {
+                                        return lf;
+                                    }
+
+                                    @Override
+                                    public Iri getDataType() {
+                                        //TODO implement
+                                        return XSD_STRING;
+                                    }
+
+                                    @Override
+                                    public Language getLanguage() {
+                                        return language;
+                                    }
+                                };
+                                break;
+                        }
+                        currentResult.put(currentBindingName, rdfTerm);
+                        readingValue = false;
+                    } catch (IllegalArgumentException e) {
+                            //not uri|bnode|literal
+                    }
+                }
+            }
+        }
+
+        public void endDocument() throws SAXException {
+            //System.out.println("results: " + results.size());
+        }
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
new file mode 100644
index 0000000..64b6133
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
@@ -0,0 +1,439 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.AbstractGraph;
+import org.apache.commons.rdf.impl.utils.TripleImpl;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlGraph extends AbstractGraph {
+
+    private static final int MAX_ISOMORPHIC_BNODES = 1000;
+    private static final Logger log = Logger.getLogger(SparqlGraph.class.getName());
+
+    final SparqlClient sparqlClient;
+
+    /**
+     * Constructs a Graph representing the default graph at the specified
+     * endpoint
+     */
+    public SparqlGraph(final String endpoint) {
+        sparqlClient = new SparqlClient(endpoint);
+    }
+
+    @Override
+    protected Iterator<Triple> performFilter(final BlankNodeOrIri filterSubject,
+            final Iri filterPredicate, final RdfTerm filterObject) {
+        try {
+            final StringBuilder queryBuilder = new StringBuilder();
+            queryBuilder.append("SELECT ?s ?p ?o WHERE { ");
+            if (filterSubject == null) {
+                queryBuilder.append("?s");
+            } else {
+                queryBuilder.append(asSparqlTerm(filterSubject));
+            }
+            queryBuilder.append(' ');
+            if (filterPredicate == null) {
+                queryBuilder.append("?p");
+            } else {
+                queryBuilder.append(asSparqlTerm(filterPredicate));
+            }
+            queryBuilder.append(' ');
+            if (filterObject == null) {
+                queryBuilder.append("?o");
+            } else {
+                queryBuilder.append(asSparqlTerm(filterObject));
+            }
+            queryBuilder.append(" }");
+            final List<Map<String, RdfTerm>> sparqlResults = sparqlClient.queryResultSet(queryBuilder.toString());
+            //first to triples without bnode-conversion
+            //rawTripkles contains the triples with the BNodes from the result set
+            final Collection<Triple> rawTriples = new ArrayList<>();
+            for (Map<String, RdfTerm> result : sparqlResults) {
+                rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIri) result.get("s"),
+                        filterPredicate != null ? filterPredicate : (Iri) result.get("p"),
+                        filterObject != null ? filterObject : result.get("o")));
+
+            }
+            //then bnode conversion
+            final Iterator<Triple> rawTriplesIter = rawTriples.iterator();
+            //this is basically just wokring around the lack of (named) nested functions
+            return (new Callable<Iterator<Triple>>() {
+
+                final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>();
+
+                private RdfTerm useSparqlNode(RdfTerm node) throws IOException {
+                    if (node instanceof BlankNodeOrIri) {
+                        return useSparqlNode((BlankNodeOrIri) node);
+                    }
+                    return node;
+                }
+
+                private BlankNodeOrIri useSparqlNode(BlankNodeOrIri node) throws IOException {
+                    if (node instanceof BlankNode) {
+                        if (!nodeMap.containsKey(node)) {
+                            createBlankNodesForcontext((BlankNode) node);
+                        }
+                        if (!nodeMap.containsKey(node)) {
+                            throw new RuntimeException("no Bnode created");
+                        }
+                        return nodeMap.get(node);
+                    } else {
+                        return node;
+                    }
+                }
+
+                private void createBlankNodesForcontext(final BlankNode node) throws IOException {
+                    final Collection<Triple> context = getContext(node);
+                    final Set<BlankNode> rawNodes = new HashSet<>();
+                    for (Triple triple : context) {
+                        {
+                            final BlankNodeOrIri subject = triple.getSubject();
+                            if (subject instanceof BlankNode) {
+                                rawNodes.add((BlankNode) subject);
+                            }
+                        }
+                        {
+                            final RdfTerm object = triple.getObject();
+                            if (object instanceof BlankNode) {
+                                rawNodes.add((BlankNode) object);
+                            }
+                        }
+                    }
+                    final Set<SparqlBNode> createdSparqlNodes = new HashSet<>();
+                    //final Map<BlankNode, SparqlBNode> preliminaryNodes = new HashMap<>();
+                    for (BlankNode rawNode : rawNodes) {
+                        for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) {
+                            SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i);
+                            if (!createdSparqlNodes.contains(sparqlBNode)) {
+                                nodeMap.put(rawNode, sparqlBNode);
+                                createdSparqlNodes.add(sparqlBNode);
+                                break;
+                            }
+                        }
+                    }
+                }
+
+                private Collection<Triple> getContext(final BlankNode node) throws IOException {
+                    //we need to get the cntext of the BNode
+                    //if the filter was for (null, null, null) we have the whole
+                    //bnode context in the reuslt set, otherwise we need to get 
+                    //more triples from the endpoint,
+                    //let's first handle the easy case
+                    if ((filterSubject == null) && (filterPredicate == null)
+                            && (filterObject == null)) {
+                        return getContextInRaw(node);
+                    } else {
+                        final Collection<Triple> startContext = getContextInRaw(node);
+                        final Set<Collection<Triple>> expandedContexts = expandContext(startContext);
+                        //expand bnode context
+                        //note that there might be different contexts for 
+                        //a bnode as present in the current result set
+                        //in this case we just haveto make sure we don't 
+                        //pick the same context for different bnodes in the resultset
+
+                        //TODO make sure we don't take one that has already been used
+                        return expandedContexts.iterator().next();
+                    }
+
+                }
+
+                private Collection<Triple> getContextInRaw(BlankNode node) {
+                    final Collection<Triple> context = new ArrayList<>();
+                    for (Triple rawTriple : rawTriples) {
+                        BlankNodeOrIri rawSubject = rawTriple.getSubject();
+                        RdfTerm rawObject = rawTriple.getObject();
+                        if (rawSubject.equals(node) || rawObject.equals(node)) {
+                            context.add(rawTriple);
+                        }
+                    }
+                    return context;
+                }
+
+                @Override
+                public Iterator<Triple> call() throws Exception {
+                    return new Iterator<Triple>() {
+
+                        @Override
+                        public boolean hasNext() {
+                            return rawTriplesIter.hasNext();
+                        }
+
+                        @Override
+                        public Triple next() {
+                            try {
+                                Triple rawTriple = rawTriplesIter.next();
+                                return new TripleImpl(useSparqlNode(rawTriple.getSubject()),
+                                        rawTriple.getPredicate(),
+                                        useSparqlNode(rawTriple.getObject()));
+                            } catch (IOException ex) {
+                                throw new RuntimeException(ex);
+                            }
+                        }
+                    };
+                }
+
+                /**
+                 * returns all MSGs that are supergraphs of startContext
+                 *
+                 * @param startContext
+                 * @return
+                 */
+                private Set<Collection<Triple>> expandContext(Collection<Triple> startContext) throws IOException {
+                    final Collection<String> triplePatterns = new ArrayList<>();
+                    int varCounter = 0;
+                    final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>();
+                    for (Triple t : startContext) {
+                        final StringBuilder builder = new StringBuilder();
+                        {
+                            final BlankNodeOrIri s = t.getSubject();
+                            String varName;
+                            if (s instanceof BlankNode) {
+                                if (bNodeVarNameMap.containsKey(s)) {
+                                    varName = bNodeVarNameMap.get(s);
+                                } else {
+                                    varName = "v" + (varCounter++);
+                                    bNodeVarNameMap.put((BlankNode) s, varName);
+                                    
+                                }
+                                builder.append('?');
+                                builder.append(varName);
+                            } else {
+                                builder.append(asSparqlTerm(s));
+                            }
+                        }
+                        builder.append(' ');
+                        builder.append(asSparqlTerm(t.getPredicate()));
+                        builder.append(' ');
+                        {
+                            final RdfTerm o = t.getObject();
+                            String varName;
+                            if (o instanceof BlankNode) {
+                                if (bNodeVarNameMap.containsKey(o)) {
+                                    varName = bNodeVarNameMap.get(o);
+                                } else {
+                                    varName = "v" + (varCounter++);
+                                    bNodeVarNameMap.put((BlankNode) o, varName);
+                                }
+                                builder.append('?');
+                                builder.append(varName);
+                            } else {
+                                builder.append(asSparqlTerm(o));
+                            }
+                        }
+                        builder.append('.');
+                        triplePatterns.add(builder.toString());
+
+                    }
+                    final StringBuilder queryBuilder = new StringBuilder();
+                    queryBuilder.append("SELECT * WHERE {\n ");
+                    for (String triplePattern : triplePatterns) {
+                        queryBuilder.append(triplePattern);
+                        queryBuilder.append('\n');
+                    }
+                    Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet();
+                    for (BlankNode bNode : bNodesInContext) {
+                        final String bNodeVarLabel = bNodeVarNameMap.get(bNode);
+                        //looking for outgoing properties of the bnode
+                        queryBuilder.append("OPTIONAL { ");
+                        queryBuilder.append('?');
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(' ');
+                        queryBuilder.append("?po");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" ?o");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" } .\n");
+                        //looking for incoming properties of the bnode
+                        queryBuilder.append("OPTIONAL { ");
+                        queryBuilder.append("?s");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(' ');
+                        queryBuilder.append("?pi");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" ?");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" } .\n");
+                    }
+                    queryBuilder.append(" }");
+                    final List<Map<String, RdfTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString());
+                    Set<Collection<Triple>> expandedContexts = new HashSet<>();
+                    //the query results may or may be from disjoint supergraphs
+                    //we expand them all as if they are different which may lead
+                    //us to the same MSG multiple times
+                    RESULTS:
+                    for (Map<String, RdfTerm> expansionQueryResult : expansionQueryResults) {
+                        Collection<Triple> expandedContext = new HashSet<>();
+                        Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>();
+                        for (BlankNode oldBNode : bNodesInContext) {
+                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
+                            final RdfTerm newNode = expansionQueryResult.get(bNodeVarLabel);
+                            if (!(newNode instanceof BlankNode)) {
+                                //this subgraph is't a match
+                                continue RESULTS;
+                            }
+                            newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode);
+                        }
+                        expandedContext.addAll(startContext);
+                        boolean newBNodeIntroduced = false;
+                        boolean newTripleAdded = false;
+                        for (BlankNode oldBNode : bNodesInContext) {
+                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
+                            {
+                                final Iri newPredicate = (Iri) expansionQueryResult.get("po" + bNodeVarLabel);
+                                if (newPredicate != null) {
+                                    RdfTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel);
+                                    if (newObject instanceof BlankNode) {
+                                        if (newBNodesToOldBNodes.containsKey(newObject)) {
+                                            //point back to BNode in startContext
+                                            newObject = newBNodesToOldBNodes.get(newObject);
+                                        } else {
+                                            newBNodeIntroduced = true;
+                                        }
+                                    }
+                                    if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) {
+                                        newTripleAdded = true;
+                                    }
+                                }
+                            }
+                            {
+                                final Iri newPredicate = (Iri) expansionQueryResult.get("pi" + bNodeVarLabel);
+                                if (newPredicate != null) {
+                                    RdfTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel);
+                                    if (newSubject instanceof BlankNode) {
+                                        if (newBNodesToOldBNodes.containsKey(newSubject)) {
+                                            //point back to BNode in startContext
+                                            newSubject = newBNodesToOldBNodes.get(newSubject);
+                                        } else {
+                                            newBNodeIntroduced = true;
+                                        }
+                                    }
+                                    if (expandedContext.add(new TripleImpl((BlankNodeOrIri) newSubject, newPredicate, oldBNode))) {
+                                        newTripleAdded = true;
+                                    }
+                                }
+                            }
+                        }
+                        if (newBNodeIntroduced) {
+                            //we could be more efficient than this ans just expand the newly introduced bnodes
+                            expandedContexts.addAll(expandContext(expandedContext));
+                        } else {
+                            if (newTripleAdded) {
+                                //look for more results
+                                expandedContexts.addAll(expandContext(expandedContext));
+                                //expandedContexts.add(expandedContext);
+                            }
+                        }
+
+                    }
+                    if (expandedContexts.isEmpty()) {
+                        expandedContexts.add(startContext);
+                    }
+                    return expandedContexts;
+                }
+
+            }).call();
+        } catch (AlienBNodeException e) {
+            return new Iterator<Triple>() {
+
+                @Override
+                public boolean hasNext() {
+                    return false;
+                }
+
+                @Override
+                public Triple next() {
+                    throw new NoSuchElementException();
+                }
+            };
+        } catch (IOException ex) {
+            throw new RuntimeException(ex);
+        } catch (Exception ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    @Override
+    protected int performSize() {
+        try {
+            return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size();
+        } catch (IOException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    private String asSparqlTerm(Iri iri) {
+        return "<" + iri.getUnicodeString() + ">";
+    }
+
+    private String asSparqlTerm(Literal literal) {
+        //TODO langauge and datatype
+        return "\"" + literal.getLexicalForm() + "\"";
+    }
+
+    private String asSparqlTerm(BlankNode bnode) {
+        if (!(bnode instanceof SparqlBNode)) {
+            throw new AlienBNodeException();
+        }
+        //this requires adding additional clauses to the graph pattern
+        throw new UnsupportedOperationException("Not supported yet.");
+    }
+
+    private String asSparqlTerm(BlankNodeOrIri term) {
+        if (term instanceof Iri) {
+            return asSparqlTerm((Iri) term);
+        } else {
+            return asSparqlTerm((BlankNode) term);
+        }
+    }
+
+    private String asSparqlTerm(RdfTerm term) {
+        if (term instanceof BlankNodeOrIri) {
+            return asSparqlTerm((BlankNodeOrIri) term);
+        } else {
+            return asSparqlTerm((Literal) term);
+        }
+    }
+
+    private static class AlienBNodeException extends RuntimeException {
+
+        public AlienBNodeException() {
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeCircleTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeCircleTest.java b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeCircleTest.java
new file mode 100644
index 0000000..5208d34
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeCircleTest.java
@@ -0,0 +1,101 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.File;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Language;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeCircleTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeCircleTest.class.getResourceAsStream("bnode-circle.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 2, graph.size());
+    }
+
+    
+    
+    @Test
+    public void nullFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafKnows = new Iri("http://xmlns.com/foaf/0.1/knows");
+
+        final Iterator<Triple> iter = graph.filter(null, null, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIri subject = triple1.getSubject();
+        final RdfTerm object = triple1.getObject();
+        Assert.assertTrue(subject instanceof BlankNode);
+        Assert.assertTrue(object instanceof BlankNode);
+        Assert.assertNotEquals(subject, object);
+        Assert.assertTrue(iter.hasNext());
+    }
+    
+
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeTest.java b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeTest.java
new file mode 100644
index 0000000..a376fcd
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/BNodeTest.java
@@ -0,0 +1,137 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.File;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Language;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeTest.class.getResourceAsStream("simple-bnode.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 3, graph.size());
+    }
+
+    /* Filtering with a Bode that cannot be in graph
+    */
+    @Test
+    public void filterAlienBNode() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final BlankNode blankNode = new BlankNode();
+        final Iterator<Triple> iter = graph.filter(blankNode, null, null);
+        Assert.assertFalse(iter.hasNext());
+    }
+    
+    @Test
+    public void bNodeIdentity() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafPerson = new Iri("http://xmlns.com/foaf/0.1/Person");
+        final Iri foafName = new Iri("http://xmlns.com/foaf/0.1/name");
+        final Iri foafKnows = new Iri("http://xmlns.com/foaf/0.1/knows");
+        final Iri rdfType = new Iri("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIri namedThing = iter.next().getSubject();
+        Assert.assertTrue(namedThing instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(null, rdfType, foafPerson);
+        Assert.assertTrue(iter2.hasNext());
+        final BlankNodeOrIri person = iter2.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        Assert.assertEquals(namedThing, person);
+        
+        final Iterator<Triple> iter3 = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter3.hasNext());
+        final RdfTerm knownThing = iter3.next().getObject();
+        Assert.assertTrue(knownThing instanceof BlankNode);
+        Assert.assertEquals(knownThing, person);
+        Assert.assertEquals(namedThing, knownThing);
+    }
+    
+    @Ignore
+    @Test
+    public void filter1() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafPerson = new Iri("http://xmlns.com/foaf/0.1/Person");
+        final Iri foafName = new Iri("http://xmlns.com/foaf/0.1/name");
+        final Iri rdfType = new Iri("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIri person = iter.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(person, rdfType, null);
+        Assert.assertTrue(iter2.hasNext());
+    }
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/SparqlGraphTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/SparqlGraphTest.java b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/SparqlGraphTest.java
new file mode 100644
index 0000000..e75bc2c
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/commons/rdf/impl/sparql/SparqlGraphTest.java
@@ -0,0 +1,104 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.File;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Language;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlGraphTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = SparqlGraphTest.class.getResourceAsStream("grounded.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 8, graph.size());
+    }
+
+    @Test
+    public void filter1() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        final Iri spiderman = new Iri("http://example.org/#spiderman");
+        final Iri greenGoblin = new Iri("http://example.org/#green-goblin");
+        final Iri enemyOf = new Iri("http://www.perceive.net/schemas/relationship/enemyOf");
+        final Iri foafName = new Iri("http://xmlns.com/foaf/0.1/name");
+        {
+            final Iterator<Triple> iter = graph.filter(spiderman, null, greenGoblin);
+            Assert.assertTrue(iter.hasNext());
+            Assert.assertEquals(enemyOf, iter.next().getPredicate());
+            Assert.assertFalse(iter.hasNext());
+        }
+        {
+            final Iterator<Triple> iter = graph.filter(spiderman, foafName, null);
+            Set<Literal> names = new HashSet<>();
+            for (int i = 0; i < 2; i++) {
+                Assert.assertTrue(iter.hasNext());
+                RdfTerm name = iter.next().getObject();
+                Assert.assertTrue(name instanceof Literal);
+                names.add((Literal)name);
+            }
+            Assert.assertFalse(iter.hasNext());
+            Assert.assertTrue(names.contains(new PlainLiteralImpl("Spiderman")));
+            Assert.assertTrue(names.contains(new PlainLiteralImpl("Человек-паук", new Language("ru"))));
+        }
+    }
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/bnode-circle.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/bnode-circle.ttl b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/bnode-circle.ttl
new file mode 100644
index 0000000..f03ab4d
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/bnode-circle.ttl
@@ -0,0 +1,7 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+
+_:a foaf:knows _:b .
+_:b foaf:knows _:a .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/grounded.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/grounded.ttl b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/grounded.ttl
new file mode 100644
index 0000000..ccc39c4
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/grounded.ttl
@@ -0,0 +1,16 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+@prefix rel: <http://www.perceive.net/schemas/relationship/> .
+
+<#green-goblin>
+    rel:enemyOf <#spiderman> ;
+    a foaf:Person ;    # in the context of the Marvel universe
+    foaf:name "Green Goblin" ;
+    foaf:age 128 .
+
+<#spiderman>
+    rel:enemyOf <#green-goblin> ;
+    a foaf:Person ;
+    foaf:name "Spiderman", "Человек-паук"@ru .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/simple-bnode.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/simple-bnode.ttl b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/simple-bnode.ttl
new file mode 100644
index 0000000..6bcf67f
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/commons/rdf/impl/sparql/simple-bnode.ttl
@@ -0,0 +1,7 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+
+<http://example.org/#me> foaf:knows [ a foaf:Person;
+  foaf:name "Alice Barker"].
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/43651da1/impl.utils/pom.xml
----------------------------------------------------------------------
diff --git a/impl.utils/pom.xml b/impl.utils/pom.xml
new file mode 100644
index 0000000..9a17a90
--- /dev/null
+++ b/impl.utils/pom.xml
@@ -0,0 +1,398 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <parent>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-parent</artifactId>
+        <version>37</version>
+        <relativePath />
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>commons-rdf</groupId>
+    <artifactId>commons-rdf-impl-utils</artifactId>
+    <version>0.1-SNAPSHOT</version>
+    <name>Apache Commons RDF Implementation Utils</name>
+    <description>
+        Apache Commons RDF provides an API modelling the RDF data model as defined by 
+        http://www.w3.org/TR/rdf11-concepts/
+    </description>
+    <url>http://commons.apache.org/sandbox/commons-rdf/</url>
+    <!-- TBD
+    <issueManagement>
+        <system>jira</system>
+        <url>http://issues.apache.org/jira/browse/RDF</url>
+    </issueManagement>
+    -->
+    <inceptionYear>2014</inceptionYear>
+    <scm>
+        <connection>scm:svn:http://svn.apache.org/repos/asf/commons/proper/rdf/trunk</connection>
+        <developerConnection>scm:svn:https://svn.apache.org/repos/asf/commons/proper/rdf/trunk</developerConnection>
+        <url>http://svn.apache.org/viewvc/commons/proper/rdf/trunk</url>
+    </scm>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>commons-rdf</groupId>
+            <artifactId>commons-rdf-api</artifactId>
+            <version>0.1-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>1.7.7</version>
+            <type>jar</type>
+        </dependency>
+    </dependencies>
+
+    <distributionManagement>
+      <site>
+        <id>apache.website</id>
+        <name>Apache Commons Site</name>
+        <url>${commons.deployment.protocol}://people.apache.org/www/commons.apache.org/${commons.componentid}</url>
+      </site>
+    </distributionManagement>
+
+    <properties>
+        <maven.compiler.source>1.7</maven.compiler.source>
+        <maven.compiler.target>1.7</maven.compiler.target>
+        <commons.javadoc.java.link>http://docs.oracle.com/javase/7/docs/api/</commons.javadoc.java.link>
+        <commons.componentid>rdf</commons.componentid>
+        <commons.release.version>${project.version}</commons.release.version>
+<!--        <commons.rc.version>RC1</commons.rc.version>
+        <commons.release.desc>(Requires Java ${maven.compiler.target} or later)</commons.release.desc>
+        <commons.release.2.version>1.4.1</commons.release.2.version>
+        <commons.release.2.binary.suffix />
+        <commons.release.2.desc>(Requires Java 1.3 or later)</commons.release.2.desc> -->
+        <!-- TBD <commons.jira.id>RDF</commons.jira.id>
+        <commons.jira.pid></commons.jira.pid> -->
+    </properties>
+
+    <build>
+        <pluginManagement>
+          <plugins>
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-compiler-plugin</artifactId>
+              <version>${commons.compiler.version}</version>
+              <configuration>
+                <!-- Fix incremental compiler bug, see https://jira.codehaus.org/browse/MCOMPILER-205 etc. -->
+                <excludes>
+                  <exclude>**/package-info.java</exclude>
+                </excludes>
+              </configuration>
+            </plugin>
+          </plugins>
+        </pluginManagement>
+        <plugins>
+
+            <!-- Exclude examples from binary jar -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                        <exclude>examples/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+
+            <!-- Exclude examples from source jar -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-source-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                        <exclude>examples/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                        <exclude>**/*FunctionalTest.java</exclude>
+                        <exclude>**/POP3*Test.java</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/bin.xml</descriptor>
+                        <descriptor>src/main/assembly/src.xml</descriptor>
+                    </descriptors>
+                    <tarLongFileMode>gnu</tarLongFileMode>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <configuration>
+                            <target>
+                                <jar destfile="target/commons-net-ftp-${project.version}.jar">
+                                    <metainf dir="${basedir}" includes="NOTICE.txt,LICENSE.txt" />
+                                    <manifest>
+                                        <attribute name="Extension-Name" value="org.apache.commons.net" />
+                                        <attribute name="Specification-Title" value="${project.name}" />
+                                        <attribute name="Implementation-Title" value="${project.name}" />
+                                        <attribute name="Implementation-Vendor" value="${project.organization.name}" />
+                                        <attribute name="Implementation-Version" value="${project.version}" />
+                                        <attribute name="Implementation-Vendor-Id" value="org.apache" />
+                                        <attribute name="Implementation-Build" value="${implementation.build}"/>
+                                        <attribute name="X-Compile-Source-JDK" value="${maven.compiler.source}" />
+                                        <attribute name="X-Compile-Target-JDK" value="${maven.compiler.target}" />
+                                    </manifest>
+                                    <fileset dir="target/classes" includes="org/apache/commons/net/ftp/**,org/apache/commons/net/*,org/apache/commons/net/io/*,org/apache/commons/net/util/*" />
+                                </jar>
+                                <!--
+                                    Create the binary examples jar, which will be added to the binary zip/tgz,
+                                    but not deployed independently to Maven
+                                -->
+                                <jar destfile="target/commons-net-examples-${project.version}.jar">
+                                    <metainf dir="${basedir}" includes="NOTICE.txt,LICENSE.txt" />
+                                    <manifest>
+                                        <attribute name="Extension-Name" value="org.apache.commons.net" />
+                                        <attribute name="Specification-Title" value="${project.name}" />
+                                        <attribute name="Implementation-Title" value="${project.name}" />
+                                        <attribute name="Implementation-Vendor" value="${project.organization.name}" />
+                                        <attribute name="Implementation-Version" value="${project.version}" />
+                                        <attribute name="Implementation-Vendor-Id" value="org.apache" />
+                                        <attribute name="Implementation-Build" value="${implementation.build}"/>
+                                        <attribute name="X-Compile-Source-JDK" value="${maven.compiler.source}" />
+                                        <attribute name="X-Compile-Target-JDK" value="${maven.compiler.target}" />
+                                        <!-- Helper application -->
+                                        <attribute name="Main-Class" value="examples/Main" />
+                                        <!-- Allow java -jar examples.jar to work -->
+                                        <attribute name="Class-Path" value="commons-net-${project.version}.jar" />
+                                    </manifest>
+                                    <fileset dir="target/classes" includes="examples/**" />
+                                </jar>
+                            </target>
+                        </configuration>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <!--  Attaches the commons-net-ftp and examples JARs to the Maven lifecycle
+                  to ensure they will be signed and deployed as normal -->
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <version>1.8</version>
+                <executions>
+                    <execution>
+                        <id>attach-artifacts</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>attach-artifact</goal>
+                        </goals>
+                        <configuration>
+                            <artifacts>
+                                <artifact>
+                                    <file>target/commons-net-ftp-${project.version}.jar</file>
+                                    <type>jar</type>
+                                    <classifier>ftp</classifier>
+                                </artifact>
+                                <artifact>
+                                    <file>target/commons-net-examples-${project.version}.jar</file>
+                                    <type>jar</type>
+                                    <classifier>examples</classifier>
+                                </artifact>
+                            </artifacts>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <!-- Exclude examples from Javadoc jar -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+                <configuration>
+                    <excludePackageNames>examples.*</excludePackageNames>
+                </configuration>
+            </plugin>
+
+            <!-- Copy the examples sources -->
+            <plugin>
+              <artifactId>maven-resources-plugin</artifactId>
+              <executions>
+                <execution>
+                  <id>copy-resources</id>
+                  <phase>pre-site</phase>
+                  <goals>
+                    <goal>copy-resources</goal>
+                  </goals>
+                  <configuration>
+                    <outputDirectory>${basedir}/target/site/examples</outputDirectory>
+                    <resources>
+                      <resource>
+                        <directory>src/main/java/examples</directory>
+                        <excludes>
+                          <exclude>**/Main.java</exclude>
+                        </excludes>
+                        <filtering>false</filtering>
+                      </resource>
+                    </resources>
+                  </configuration>
+                </execution>
+              </executions>
+            </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-scm-publish-plugin</artifactId>
+            <configuration>
+              <ignorePathsToDelete>
+                <ignorePathToDelete>javadocs</ignorePathToDelete>
+              </ignorePathsToDelete>
+            </configuration>
+          </plugin>
+
+        </plugins>
+
+    </build>
+
+    <reporting>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-changes-plugin</artifactId>
+                <version>${commons.changes.version}</version>
+                <configuration>
+                    <issueLinkTemplatePerSystem>
+                        <default>%URL%/%ISSUE%</default>
+                    </issueLinkTemplatePerSystem>
+                    <template>release-notes.vm</template>
+                    <templateDirectory>src/changes</templateDirectory>
+                    <!--  Add sample JIRA report - 'mvn changes:jira-report' or 'mvn site' -->
+                    <onlyCurrentVersion>false</onlyCurrentVersion>
+                    <columnNames>Fix Version,Key,Component,Summary,Type,Resolution,Status</columnNames>
+                    <!-- Sort cols have to be reversed in JIRA 4 -->
+                    <sortColumnNames>Key DESC,Type,Fix Version DESC</sortColumnNames>
+                    <resolutionIds>Fixed</resolutionIds>
+                    <statusIds>Resolved,Closed</statusIds>
+                    <!-- Don't include sub-task -->
+                    <typeIds>Bug,New Feature,Task,Improvement,Wish,Test</typeIds>
+                    <fixVersionIds>${commons.release.version}</fixVersionIds>
+                    <!-- The default is 100 -->
+                    <maxEntries>100</maxEntries>
+                </configuration>
+                <reportSets>
+                    <reportSet>
+                        <reports>
+                            <report>changes-report</report>
+                            <report>jira-report</report>
+                        </reports>
+                    </reportSet>
+                </reportSets>
+            </plugin>
+
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>findbugs-maven-plugin</artifactId>
+                <version>2.5.5</version>
+                <configuration>
+                    <excludeFilterFile>findbugs-exclude-filter.xml</excludeFilterFile>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>clirr-maven-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                      <exclude>examples/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+                <configuration>
+                    <excludePackageNames>examples.*</excludePackageNames>
+                </configuration>
+                <reportSets>
+                  <reportSet>
+                    <reports>
+                      <report>javadoc</report>
+                    </reports>
+                  </reportSet>
+                </reportSets>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <version>2.11</version>
+                <configuration>
+                    <configLocation>${basedir}/checkstyle.xml</configLocation>
+                    <suppressionsLocation>${basedir}/checkstyle-suppressions.xml</suppressionsLocation>
+                    <enableRulesSummary>false</enableRulesSummary>
+                </configuration>
+            </plugin>
+
+            <plugin>
+              <groupId>org.apache.rat</groupId>
+              <artifactId>apache-rat-plugin</artifactId>
+              <!-- Parent pom does not (yet) exclude these -->
+              <configuration>
+                <excludes>
+                  <exclude>.checkstyle</exclude>
+                  <exclude>.fbprefs</exclude>
+                </excludes>
+              </configuration>
+            </plugin>
+
+        </plugins>
+    </reporting>
+
+    <profiles>
+      <profile>
+        <!-- Temporary hack to suppress Javadoc 8 errors -->
+        <id>javadoc_8</id>
+        <activation>
+          <jdk>1.8</jdk>
+        </activation>
+        <properties>
+          <!-- Disable Xdoclint, until JavaDoc issues are fixed -->
+          <additionalparam>-Xdoclint:none</additionalparam>
+        </properties>
+      </profile>
+    </profiles>
+
+</project>


Mime
View raw message