clerezza-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject [3/4] clerezza git commit: Merged in clerezza-rdf-core
Date Sat, 30 Dec 2017 11:58:35 GMT
http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
new file mode 100644
index 0000000..28eb28e
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
@@ -0,0 +1,507 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.logging.Logger;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.AbstractGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlGraph extends AbstractGraph {
+
+    private static final int MAX_ISOMORPHIC_BNODES = 1000;
+    private static final Logger log = Logger.getLogger(SparqlGraph.class.getName());
+
+    final SparqlClient sparqlClient;
+
+    /**
+     * Creates a Graph backed by the default graph of the SPARQL
+     * endpoint at the given URI.
+     */
+    public SparqlGraph(final String endpoint) {
+        this.sparqlClient = new SparqlClient(endpoint);
+    }
+
+    /**
+     * Returns the triples matching the given pattern. Blank nodes coming
+     * back from the endpoint are converted to {@link SparqlBNode}s that
+     * carry their surrounding context graph, so that they can be used in
+     * subsequent filter calls against this graph.
+     *
+     * Any of the filter arguments may be null, acting as a wildcard.
+     */
+    @Override
+    protected Iterator<Triple> performFilter(final BlankNodeOrIRI filterSubject,
+            final IRI filterPredicate, final RDFTerm filterObject) {
+        try {
+            String query = createQuery(filterSubject, filterPredicate, filterObject);
+            final List<Map<String, RDFTerm>> sparqlResults = sparqlClient.queryResultSet(query);
+            //first convert to triples without bnode-conversion
+            //rawTriples contains the triples with the BNodes from the result set
+            final Collection<Triple> rawTriples = new ArrayList<>();
+            for (Map<String, RDFTerm> result : sparqlResults) {
+                rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIRI) result.get("s"),
+                        filterPredicate != null ? filterPredicate : (IRI) result.get("p"),
+                        filterObject != null ? filterObject : result.get("o")));
+
+            }
+            //then bnode conversion
+            final Iterator<Triple> rawTriplesIter = rawTriples.iterator();
+            //this is basically just working around the lack of (named) nested functions
+            return (new Callable<Iterator<Triple>>() {
+
+                //maps raw result-set BNodes to their context-aware replacements
+                final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>();
+                //contexts already assigned to some BNode of this result set
+                final Set<ImmutableGraph> usedContext = new HashSet<>();
+
+                private RDFTerm useSparqlNode(RDFTerm node) throws IOException {
+                    if (node instanceof BlankNodeOrIRI) {
+                        return useSparqlNode((BlankNodeOrIRI) node);
+                    }
+                    return node;
+                }
+
+                private BlankNodeOrIRI useSparqlNode(BlankNodeOrIRI node) throws IOException {
+                    if (node instanceof BlankNode) {
+                        if (!nodeMap.containsKey(node)) {
+                            createBlankNodesForContext((BlankNode) node);
+                        }
+                        if (!nodeMap.containsKey(node)) {
+                            throw new RuntimeException("no Bnode created");
+                        }
+                        return nodeMap.get(node);
+                    } else {
+                        return node;
+                    }
+                }
+
+                //creates SparqlBNodes for all BNodes appearing in node's context
+                private void createBlankNodesForContext(final BlankNode node) throws IOException {
+                    final Collection<Triple> context = getContext(node);
+                    final Set<BlankNode> rawNodes = new HashSet<>();
+                    for (Triple triple : context) {
+                        {
+                            final BlankNodeOrIRI subject = triple.getSubject();
+                            if (subject instanceof BlankNode) {
+                                rawNodes.add((BlankNode) subject);
+                            }
+                        }
+                        {
+                            final RDFTerm object = triple.getObject();
+                            if (object instanceof BlankNode) {
+                                rawNodes.add((BlankNode) object);
+                            }
+                        }
+                    }
+                    final Set<SparqlBNode> createdSparqlNodes = new HashSet<>();
+                    //the isomorphism-offset i disambiguates BNodes that would
+                    //otherwise be identical within the same context
+                    for (BlankNode rawNode : rawNodes) {
+                        for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) {
+                            SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i);
+                            if (!createdSparqlNodes.contains(sparqlBNode)) {
+                                nodeMap.put(rawNode, sparqlBNode);
+                                createdSparqlNodes.add(sparqlBNode);
+                                break;
+                            }
+                        }
+                    }
+                }
+
+                private ImmutableGraph getContext(final BlankNode node) throws IOException {
+                    //we need to get the context of the BNode
+                    //if the filter was for (null, null, null) we have the whole
+                    //bnode context in the result set, otherwise we need to get 
+                    //more triples from the endpoint,
+                    //let's first handle the easy case
+                    if ((filterSubject == null) && (filterPredicate == null)
+                            && (filterObject == null)) {
+                        return getContextInRaw(node);
+                    } else {
+                        final ImmutableGraph startContext = getContextInRaw(node);
+                        final Set<ImmutableGraph> expandedContexts = expandContext(startContext);
+                        //expand bnode context
+                        //note that there might be different contexts for 
+                        //a bnode as present in the current result set
+                        //in this case we just have to make sure we don't 
+                        //pick the same context for different bnodes in the resultset
+                        ImmutableGraph result = null;
+                        for (ImmutableGraph expandedContext : expandedContexts) {
+                            if (!usedContext.contains(expandedContext)) {
+                                result = expandedContext;
+                                break;
+                            }
+                        }
+                        if (result == null) {
+                            log.warning("The underlying sparql graph seems to contain redundant triples, this might cause unexpected results");
+                            result = expandedContexts.iterator().next();
+                        } else {
+                            usedContext.add(result);
+                        }
+                        return result;
+                    }
+
+                }
+
+                //context as far as visible in the current raw result triples
+                private ImmutableGraph getContextInRaw(BlankNode node) {
+                    final Graph contextBuilder = new SimpleGraph();
+                    for (Triple rawTriple : rawTriples) {
+                        BlankNodeOrIRI rawSubject = rawTriple.getSubject();
+                        RDFTerm rawObject = rawTriple.getObject();
+                        if (rawSubject.equals(node) || rawObject.equals(node)) {
+                            contextBuilder.add(rawTriple);
+                        }
+                    }
+                    return contextBuilder.getImmutableGraph();
+                }
+
+                @Override
+                public Iterator<Triple> call() throws Exception {
+                    return new Iterator<Triple>() {
+
+                        @Override
+                        public boolean hasNext() {
+                            return rawTriplesIter.hasNext();
+                        }
+
+                        @Override
+                        public Triple next() {
+                            try {
+                                Triple rawTriple = rawTriplesIter.next();
+                                return new TripleImpl(useSparqlNode(rawTriple.getSubject()),
+                                        rawTriple.getPredicate(),
+                                        useSparqlNode(rawTriple.getObject()));
+                            } catch (IOException ex) {
+                                throw new RuntimeException(ex);
+                            }
+                        }
+                    };
+                }
+
+                /**
+                 * returns all MSGs that are supergraphs of startContext
+                 *
+                 * @param startContext
+                 * @return
+                 */
+                private Set<ImmutableGraph> expandContext(Collection<Triple> startContext) throws IOException {
+
+                    final StringBuilder queryBuilder = new StringBuilder();
+                    queryBuilder.append("SELECT * WHERE {\n ");
+                    Map<BlankNode, String> bNodeVarNameMap = writeTriplePattern(queryBuilder, startContext);
+                    Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet();
+                    for (BlankNode bNode : bNodesInContext) {
+                        final String bNodeVarLabel = bNodeVarNameMap.get(bNode);
+                        //looking for outgoing properties of the bnode
+                        queryBuilder.append("OPTIONAL { ");
+                        queryBuilder.append('?');
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(' ');
+                        queryBuilder.append("?po");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" ?o");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" } .\n");
+                        //looking for incoming properties of the bnode
+                        queryBuilder.append("OPTIONAL { ");
+                        queryBuilder.append("?s");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(' ');
+                        queryBuilder.append("?pi");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" ?");
+                        queryBuilder.append(bNodeVarLabel);
+                        queryBuilder.append(" } .\n");
+                    }
+                    queryBuilder.append(" }");
+                    final List<Map<String, RDFTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString());
+                    Set<ImmutableGraph> expandedContexts = new HashSet<>();
+                    //the query results may or may not be from disjoint supergraphs
+                    //we expand them all as if they are different which may lead
+                    //us to the same MSG multiple times
+                    RESULTS:
+                    for (Map<String, RDFTerm> expansionQueryResult : expansionQueryResults) {
+                        Collection<Triple> expandedContext = new HashSet<>();
+                        Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>();
+                        for (BlankNode oldBNode : bNodesInContext) {
+                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
+                            final RDFTerm newNode = expansionQueryResult.get(bNodeVarLabel);
+                            if (!(newNode instanceof BlankNode)) {
+                                //this subgraph isn't a match
+                                continue RESULTS;
+                            }
+                            newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode);
+                        }
+                        expandedContext.addAll(startContext);
+                        boolean newBNodeIntroduced = false;
+                        boolean newTripleAdded = false;
+                        for (BlankNode oldBNode : bNodesInContext) {
+                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
+                            {
+                                final IRI newPredicate = (IRI) expansionQueryResult.get("po" + bNodeVarLabel);
+                                if (newPredicate != null) {
+                                    RDFTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel);
+                                    if (newObject instanceof BlankNode) {
+                                        if (newBNodesToOldBNodes.containsKey(newObject)) {
+                                            //point back to BNode in startContext
+                                            newObject = newBNodesToOldBNodes.get(newObject);
+                                        } else {
+                                            newBNodeIntroduced = true;
+                                        }
+                                    }
+                                    if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) {
+                                        newTripleAdded = true;
+                                    }
+                                }
+                            }
+                            {
+                                final IRI newPredicate = (IRI) expansionQueryResult.get("pi" + bNodeVarLabel);
+                                if (newPredicate != null) {
+                                    RDFTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel);
+                                    if (newSubject instanceof BlankNode) {
+                                        if (newBNodesToOldBNodes.containsKey(newSubject)) {
+                                            //point back to BNode in startContext
+                                            newSubject = newBNodesToOldBNodes.get(newSubject);
+                                        } else {
+                                            newBNodeIntroduced = true;
+                                        }
+                                    }
+                                    if (expandedContext.add(new TripleImpl((BlankNodeOrIRI) newSubject, newPredicate, oldBNode))) {
+                                        newTripleAdded = true;
+                                    }
+                                }
+                            }
+                        }
+                        if (newBNodeIntroduced) {
+                            //we could be more efficient than this and just expand the newly introduced bnodes
+                            expandedContexts.addAll(expandContext(expandedContext));
+                        } else {
+                            if (newTripleAdded) {
+                                //look for more results
+                                expandedContexts.addAll(expandContext(expandedContext));
+                                //continued iteration obsoleted by recursion
+                                break;
+                            }
+                        }
+
+                    }
+                    if (expandedContexts.isEmpty()) {
+                        expandedContexts.add(new SimpleGraph(startContext).getImmutableGraph());
+                    }
+                    return expandedContexts;
+                }
+
+            }).call();
+        } catch (AlienBNodeException e) {
+            //the filter refers to a BNode that cannot exist at the endpoint
+            return new Iterator<Triple>() {
+
+                @Override
+                public boolean hasNext() {
+                    return false;
+                }
+
+                @Override
+                public Triple next() {
+                    throw new NoSuchElementException();
+                }
+            };
+        } catch (IOException ex) {
+            throw new RuntimeException(ex);
+        } catch (Exception ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    /**
+     * Builds the SELECT query for the given triple pattern.
+     *
+     * Null filter arguments become projected variables (?s, ?p, ?o).
+     * A SparqlBNode filter argument becomes a variable (?sn resp. ?on)
+     * that is constrained by the bnode's context graph pattern, so the
+     * endpoint re-identifies the blank node by its surrounding triples.
+     */
+    private String createQuery(final BlankNodeOrIRI filterSubject, final IRI filterPredicate, final RDFTerm filterObject) {
+        final StringBuilder selectBuilder = new StringBuilder();
+        selectBuilder.append("SELECT ");
+        final StringBuilder whereBuilder = new StringBuilder();
+        whereBuilder.append("WHERE { ");
+        if (filterSubject == null) {
+            whereBuilder.append("?s");
+            selectBuilder.append("?s ");
+        } else {
+            if (filterSubject instanceof SparqlBNode) {
+                whereBuilder.append("?sn");
+            } else {
+                whereBuilder.append(asSparqlTerm(filterSubject));
+            }
+        }
+        whereBuilder.append(' ');
+        if (filterPredicate == null) {
+            whereBuilder.append("?p");
+            selectBuilder.append("?p ");
+        } else {
+            whereBuilder.append(asSparqlTerm(filterPredicate));
+        }
+        whereBuilder.append(' ');
+        if (filterObject == null) {
+            whereBuilder.append("?o");
+            selectBuilder.append("?o ");
+        } else {
+            if (filterObject instanceof SparqlBNode) {
+                whereBuilder.append("?on");
+            } else {
+                whereBuilder.append(asSparqlTerm(filterObject));
+            }
+        }
+        whereBuilder.append(" .\n");
+        if (filterSubject instanceof SparqlBNode) {
+            //expand bnode context
+            writeTriplePattern(whereBuilder, ((SparqlBNode) filterSubject).context, "sn");
+        }
+        
+        if (filterObject instanceof SparqlBNode) {
+            //expand bnode context
+            writeTriplePattern(whereBuilder, ((SparqlBNode) filterObject).context, "on");
+        }
+
+        whereBuilder.append(" }");
+        return selectBuilder.append(whereBuilder).toString();
+    }
+
+    @Override
+    protected int performSize() {
+        try {
+            //TODO replace this with count
+            return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size();
+        } catch (IOException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    private String asSparqlTerm(IRI iri) {
+        return "<" + iri.getUnicodeString() + ">";
+    }
+
+    /**
+     * Serializes a literal as a quoted SPARQL string.
+     *
+     * Backslashes must be escaped first, so that the escape sequences
+     * inserted for newlines and quotes are not themselves re-escaped;
+     * previously a lexical form containing a backslash produced an
+     * invalid query.
+     */
+    private String asSparqlTerm(Literal literal) {
+        //TODO language and datatype
+        return "\"" + literal.getLexicalForm()
+                .replace("\\", "\\\\")
+                .replace("\n", "\\n")
+                .replace("\"", "\\\"") + "\"";
+    }
+
+    /**
+     * A BlankNode cannot be serialized directly: one that did not come
+     * from this graph cannot be addressed at all, and a SparqlBNode must
+     * be expanded into a graph pattern by the caller instead.
+     */
+    private String asSparqlTerm(BlankNode bnode) {
+        if (bnode instanceof SparqlBNode) {
+            //this requires adding additional clauses to the graph pattern
+            throw new RuntimeException("SparqlBNodes should have been handled earlier");
+        }
+        throw new AlienBNodeException();
+    }
+
+    /**
+     * Dispatches to the IRI or BlankNode serialization.
+     */
+    private String asSparqlTerm(BlankNodeOrIRI term) {
+        return (term instanceof IRI)
+                ? asSparqlTerm((IRI) term)
+                : asSparqlTerm((BlankNode) term);
+    }
+
+    /**
+     * Dispatches to the BlankNodeOrIRI or Literal serialization.
+     */
+    private String asSparqlTerm(RDFTerm term) {
+        return (term instanceof BlankNodeOrIRI)
+                ? asSparqlTerm((BlankNodeOrIRI) term)
+                : asSparqlTerm((Literal) term);
+    }
+
+
+    /**
+     * Writes the triples as a SPARQL graph pattern to queryBuilder,
+     * with no variable label for SparqlBNode.internalBNodeId occurrences.
+     *
+     * @return a map from each BlankNode in triples to its variable name
+     */
+    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples) {
+        return writeTriplePattern(queryBuilder, triples, null);
+    }
+        
+    /**
+     * Appends the triples as a SPARQL graph pattern to queryBuilder.
+     *
+     * Each distinct BlankNode gets a fresh variable (v0, v1, ...); an
+     * occurrence of SparqlBNode.internalBNodeId is written as the
+     * variable named varLabelForInternalBNodeId, tying the pattern to
+     * the node being re-identified at the endpoint.
+     *
+     * @return a map from each BlankNode encountered to its variable name
+     */
+    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples, String varLabelForInternalBNodeId) {
+        final Collection<String> triplePatterns = new ArrayList<>();
+        int varCounter = 0;
+        final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>();
+        for (Triple t : triples) {
+            final StringBuilder builder = new StringBuilder();
+            {
+                final BlankNodeOrIRI s = t.getSubject();
+                String varName;
+                if (s instanceof BlankNode) {
+                    //reuse the variable for a BNode seen before
+                    if (bNodeVarNameMap.containsKey(s)) {
+                        varName = bNodeVarNameMap.get(s);
+                    } else {
+                        varName = "v" + (varCounter++);
+                        bNodeVarNameMap.put((BlankNode) s, varName);
+                    }
+                    builder.append('?');
+                    builder.append(varName);
+                } else {
+                    if (s.equals(SparqlBNode.internalBNodeId)) {
+                        builder.append('?');
+                        builder.append(varLabelForInternalBNodeId);
+                    } else {
+                        builder.append(asSparqlTerm(s));
+                    }
+                    
+                }
+            }
+            builder.append(' ');
+            builder.append(asSparqlTerm(t.getPredicate()));
+            builder.append(' ');
+            {
+                final RDFTerm o = t.getObject();
+                String varName;
+                if (o instanceof BlankNode) {
+                    //reuse the variable for a BNode seen before
+                    if (bNodeVarNameMap.containsKey(o)) {
+                        varName = bNodeVarNameMap.get(o);
+                    } else {
+                        varName = "v" + (varCounter++);
+                        bNodeVarNameMap.put((BlankNode) o, varName);
+                    }
+                    builder.append('?');
+                    builder.append(varName);
+                } else {
+                    if (o.equals(SparqlBNode.internalBNodeId)) {
+                        builder.append('?');
+                        builder.append(varLabelForInternalBNodeId);
+                    } else {
+                        builder.append(asSparqlTerm(o));
+                    }
+                }
+            }
+            builder.append('.');
+            triplePatterns.add(builder.toString());
+
+        }
+        for (String triplePattern : triplePatterns) {
+
+            queryBuilder.append(triplePattern);
+            queryBuilder.append('\n');
+        }
+        return bNodeVarNameMap;
+
+    }
+
+    /**
+     * Thrown when a filter involves a BlankNode that did not originate
+     * from this graph (i.e. is not a SparqlBNode) and therefore cannot
+     * match anything at the endpoint.
+     */
+    private static class AlienBNodeException extends RuntimeException {
+
+        public AlienBNodeException() {
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java
new file mode 100644
index 0000000..43d48f0
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright 2016 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.parsers.SAXParser;
+import javax.xml.parsers.SAXParserFactory;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.impl.utils.AbstractLiteral;
+import org.apache.http.util.EntityUtils;
+import org.xml.sax.Attributes;
+import org.xml.sax.InputSource;
+import org.xml.sax.SAXException;
+import org.xml.sax.XMLReader;
+import org.xml.sax.helpers.DefaultHandler;
+
+/**
+ *
+ * @author user
+ */
+public class SparqlResultParser {
+
+    /**
+     * Parses a SPARQL XML results document from the stream.
+     *
+     * @return a Boolean for ASK results, or a List of binding-name to
+     *         RDFTerm maps for SELECT results
+     */
+    static Object parse(InputStream in) throws IOException {
+        try {
+            SAXParserFactory spf = SAXParserFactory.newInstance();
+            spf.setNamespaceAware(true);
+            //harden against XXE: a results document never needs external entities
+            spf.setFeature("http://xml.org/sax/features/external-general-entities", false);
+            spf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
+            SAXParser saxParser = spf.newSAXParser();
+            XMLReader xmlReader = saxParser.getXMLReader();
+            final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler();
+            xmlReader.setContentHandler(sparqlsResultsHandler);
+            xmlReader.parse(new InputSource(in));
+            return sparqlsResultsHandler.getResults();
+        } catch (ParserConfigurationException | SAXException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    /**
+     * SAX handler that builds either a Boolean (for ASK results) or a
+     * List of binding-name to RDFTerm maps (for SELECT results) from a
+     * SPARQL XML results document.
+     */
+    public static final class SparqlsResultsHandler extends DefaultHandler {
+
+        private static final String RESULTS_NS = "http://www.w3.org/2005/sparql-results#";
+
+        private String currentBindingName;
+        private Map<String, RDFTerm> currentResult = null;
+        //a Boolean or a List<Map<String, RDFTerm>> once parsing has started
+        private Object results = null;
+        private boolean readingValue;
+        private String lang; //the xml:lang attribute of a literal
+        private StringWriter valueWriter;
+        //maps bnode labels to BlankNodes, so equal labels yield the same node
+        private Map<String, BlankNode> bNodeMap = new HashMap<>();
+        private static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string");
+        private static final IRI RDF_LANG_STRING = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString");
+
+        //returns the same BlankNode for the same label within this document
+        private RDFTerm getBNode(String value) {
+            if (!bNodeMap.containsKey(value)) {
+                bNodeMap.put(value, new BlankNode());
+            }
+            return bNodeMap.get(value);
+        }
+
+        private Object getResults() {
+            return results;
+        }
+
+        private List<Map<String, RDFTerm>> getResultValueMaps() {
+            return (List<Map<String, RDFTerm>>) results;
+        }
+
+        //the possible binding value tags in a SELECT result
+        enum BindingType {
+
+            uri, bnode, literal;
+        }
+
+        @Override
+        public void startDocument() throws SAXException {
+
+        }
+
+        @Override
+        public void startElement(String namespaceURI,
+                String localName,
+                String qName,
+                Attributes atts)
+                throws SAXException {
+            if (RESULTS_NS.equals(namespaceURI)) {
+                if ("boolean".equals(localName)) {
+                    if (results != null) {
+                        throw new SAXException("unexpected tag <boolean>");
+                    }
+                    //results will have Boolean value assigned once value is read
+                    readingValue = true;
+                    valueWriter = new StringWriter();
+                } else if ("results".equals(localName)) {
+                    if (results != null) {
+                        throw new SAXException("unexpected tag <results>");
+                    }
+                    results = new ArrayList<Map<String, RDFTerm>>();
+                } else if ("result".equals(localName)) {
+                    if (currentResult != null) {
+                        throw new SAXException("unexpected tag <result>");
+                    }
+                    currentResult = new HashMap<String, RDFTerm>();
+                } else if ("binding".equals(localName)) {
+                    if (currentResult == null) {
+                        throw new SAXException("unexpected tag <binding>");
+                    }
+                    currentBindingName = atts.getValue("name");
+                } else if ("uri".equals(localName) || "bnode".equals(localName) || "literal".equals(localName)) {
+                    if (readingValue) {
+                        throw new SAXException("unexpected tag <" + localName + ">");
+                    }
+                    lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang");
+                    readingValue = true;
+                    valueWriter = new StringWriter();
+                }
+            }
+        }
+
+        @Override
+        public void characters(char[] chars, int start, int length) throws SAXException {
+            if (readingValue) {
+                valueWriter.write(chars, start, length);
+            }
+        }
+
+        @Override
+        public void endElement(String namespaceURI,
+                String localName,
+                String qName)
+                throws SAXException {
+            if (RESULTS_NS.equals(namespaceURI)) {
+                if ("result".equals(localName)) {
+                    ((List<Map<String, RDFTerm>>) results).add(currentResult);
+                    currentResult = null;
+                } else if ("binding".equals(localName)) {
+                    if (currentBindingName == null) {
+                        throw new SAXException("unexpected tag </binding>");
+                    }
+                    currentBindingName = null;
+                } else if ("boolean".equals(localName)) {
+                    results = Boolean.valueOf(valueWriter.toString());
+                    valueWriter = null;
+                    readingValue = false;
+                } else {
+                    try {
+                        BindingType b = BindingType.valueOf(localName);
+                        RDFTerm rdfTerm = null;
+                        final Language language = lang == null ? null : new Language(lang);
+                        switch (b) {
+                            case uri:
+                                rdfTerm = new IRI(valueWriter.toString());
+                                valueWriter = null;
+                                break;
+                            case bnode:
+                                rdfTerm = getBNode(valueWriter.toString());
+                                valueWriter = null;
+                                break;
+                            case literal:
+                                final String lf = valueWriter.toString();
+                                valueWriter = null;
+                                rdfTerm = new AbstractLiteral() {
+
+                                    @Override
+                                    public String getLexicalForm() {
+                                        return lf;
+                                    }
+
+                                    @Override
+                                    public IRI getDataType() {
+                                        if (language != null) {
+                                            return RDF_LANG_STRING;
+                                        }
+                                        //TODO implement datatype support
+                                        return XSD_STRING;
+                                    }
+
+                                    @Override
+                                    public Language getLanguage() {
+                                        return language;
+                                    }
+
+                                    @Override
+                                    public String toString() {
+                                        return "\"" + getLexicalForm() + "\"@" + getLanguage();
+                                    }
+                                };
+                                break;
+                        }
+                        currentResult.put(currentBindingName, rdfTerm);
+                        readingValue = false;
+                    } catch (IllegalArgumentException e) {
+                        //not uri|bnode|literal
+                    }
+                }
+            }
+        }
+
+        @Override
+        public void endDocument() throws SAXException {
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
new file mode 100644
index 0000000..3bf063d
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeCircleTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeCircleTest.class.getResourceAsStream("bnode-circle.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 2, graph.size());
+    }
+
+    
+    
+    @Test
+    public void nullFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        final Iterator<Triple> iter = graph.filter(null, null, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIRI subject = triple1.getSubject();
+        final RDFTerm object = triple1.getObject();
+        Assert.assertTrue(subject instanceof BlankNode);
+        Assert.assertTrue(object instanceof BlankNode);
+        Assert.assertNotEquals(subject, object);
+        Assert.assertTrue(iter.hasNext());
+    }
+    
+    @Test
+    public void foafKnowsFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");
+
+        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIRI subject = triple1.getSubject();
+        final RDFTerm object = triple1.getObject();
+        Assert.assertTrue(subject instanceof BlankNode);
+        Assert.assertTrue(object instanceof BlankNode);
+        Assert.assertNotEquals(subject, object);
+        Assert.assertTrue(iter.hasNext());
+    }
+    
+
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
new file mode 100644
index 0000000..168ef74
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeTest.class.getResourceAsStream("simple-bnode.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 3, graph.size());
+    }
+
+    /* Filtering with a BNode that cannot be in the graph
+    */
+    @Test
+    public void filterAlienBNode() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final BlankNode blankNode = new BlankNode();
+        final Iterator<Triple> iter = graph.filter(blankNode, null, null);
+        Assert.assertFalse(iter.hasNext());
+    }
+    
+    @Test
+    public void bNodeIdentity() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person");
+        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
+        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");
+        final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIRI namedThing = iter.next().getSubject();
+        Assert.assertTrue(namedThing instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(null, rdfType, foafPerson);
+        Assert.assertTrue(iter2.hasNext());
+        final BlankNodeOrIRI person = iter2.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        Assert.assertEquals(namedThing, person);
+        
+        final Iterator<Triple> iter3 = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter3.hasNext());
+        final RDFTerm knownThing = iter3.next().getObject();
+        Assert.assertTrue(knownThing instanceof BlankNode);
+        Assert.assertEquals(knownThing, person);
+        Assert.assertEquals(namedThing, knownThing);
+    }
+    
+    @Test
+    public void filter1() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person");
+        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
+        final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIRI person = iter.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(person, rdfType, null);
+        Assert.assertTrue(iter2.hasNext());
+    }
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java
new file mode 100644
index 0000000..5b954c6
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.rdf.core.serializedform.Serializer;
+import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class Dadmin2Test {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = Dadmin2Test.class.getResourceAsStream("dadmin2.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 12, graph.size());
+    }
+
+    @Test
+    public void dump() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Serializer serializer = Serializer.getInstance();
+        serializer.serialize(System.out, graph, SupportedFormat.TURTLE);
+    }
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java
new file mode 100644
index 0000000..d65a327
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.rdf.core.serializedform.Serializer;
+import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class DadminTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = DadminTest.class.getResourceAsStream("dadmin.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 1, graph.size());
+    }
+
+    @Test
+    public void dump() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Serializer serializer = Serializer.getInstance();
+        serializer.serialize(System.out, graph, SupportedFormat.TURTLE);
+    }
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
new file mode 100644
index 0000000..975084f
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class SimilarBNodes {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = SimilarBNodes.class.getResourceAsStream("similar-bnodes.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 2, graph.size());
+    }
+
+    
+    
+    @Test
+    public void foafKnowsFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");
+
+        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIRI subject1 = triple1.getSubject();
+        Assert.assertTrue(subject1 instanceof BlankNode);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple2 = iter.next();
+        final BlankNodeOrIRI subject2 = triple2.getSubject();
+        Assert.assertTrue(subject2 instanceof BlankNode);
+        Assert.assertNotEquals(subject1, subject2);
+    }
+    
+
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java
new file mode 100644
index 0000000..ff1d794
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlClientTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = SparqlClientTest.class.getResourceAsStream("grounded.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+    
+    @Test
+    public void select() throws IOException {
+        final SparqlClient sparqlClient = new SparqlClient(
+                "http://localhost:" + serverPort + "/ds/query");
+        List<Map<String, RDFTerm>> result = sparqlClient.queryResultSet(
+                "SELECT ?name WHERE { "
+                        + "<http://example.org/#spiderman> "
+                        + "<http://xmlns.com/foaf/0.1/name> ?name}");
+        Assert.assertEquals("There should be two names", 2, result.size());
+    }
+    
+    @Test
+    public void ask() throws IOException {
+        final SparqlClient sparqlClient = new SparqlClient(
+                "http://localhost:" + serverPort + "/ds/query");
+        Object result = sparqlClient.queryResult(
+                "ASK { "
+                        + "<http://example.org/#spiderman> "
+                        + "<http://xmlns.com/foaf/0.1/name> ?name}");
+        Assert.assertEquals("ASK should result to true", Boolean.TRUE, result);
+    }
+
+    @Test
+    public void desribe() throws IOException {
+        final SparqlClient sparqlClient = new SparqlClient(
+                "http://localhost:" + serverPort + "/ds/query");
+        Object result = sparqlClient.queryResult(
+                "DESCRIBE <http://example.org/#spiderman>");
+        Assert.assertTrue("DESCRIBE should return a graph", result instanceof Graph);
+    }
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java
new file mode 100644
index 0000000..0461f3e
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlGraphTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = SparqlGraphTest.class.getResourceAsStream("grounded.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the expected size", 8, graph.size());
+    }
+
+    @Test
+    public void filter1() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        final IRI spiderman = new IRI("http://example.org/#spiderman");
+        final IRI greenGoblin = new IRI("http://example.org/#green-goblin");
+        final IRI enemyOf = new IRI("http://www.perceive.net/schemas/relationship/enemyOf");
+        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
+        {
+            final Iterator<Triple> iter = graph.filter(spiderman, null, greenGoblin);
+            Assert.assertTrue(iter.hasNext());
+            Assert.assertEquals(enemyOf, iter.next().getPredicate());
+            Assert.assertFalse(iter.hasNext());
+        }
+        {
+            final Iterator<Triple> iter = graph.filter(spiderman, foafName, null);
+            Set<Literal> names = new HashSet<>();
+            for (int i = 0; i < 2; i++) {
+                Assert.assertTrue(iter.hasNext());
+                RDFTerm name = iter.next().getObject();
+                Assert.assertTrue(name instanceof Literal);
+                names.add((Literal)name);
+            }
+            Assert.assertFalse(iter.hasNext());
+            Assert.assertTrue(names.contains(new PlainLiteralImpl("Spiderman")));
+            Assert.assertTrue(names.contains(new PlainLiteralImpl("Человек-паук", new Language("ru"))));
+        }
+    }
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/bnode-circle.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/bnode-circle.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/bnode-circle.ttl
new file mode 100644
index 0000000..f03ab4d
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/bnode-circle.ttl
@@ -0,0 +1,7 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+
+_:a foaf:knows _:b .
+_:b foaf:knows _:a .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin.ttl
new file mode 100644
index 0000000..1213792
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin.ttl
@@ -0,0 +1,5 @@
+@prefix rdf:	<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix ns1:	<http://schema.org/> .
+
+_:b10521
+	ns1:articleBody	"\n  <li><a href=\"http://data.admin.ch/datasets\" title=\"Available Data\">Data</a></li>\n  <li><a href=\"http://data.admin.ch/apps\" title=\"Applications\">Applications</a></li>\n  <li><a href=\"http://data.admin.ch/sparql\" title=\"SPARQL Endpoint\">SPARQL</a></li>\n  <li><a title=\"About the Portal\" href=\"http://data.admin.ch/about\">About the Portal</a></li>\n  <li><a title=\"Contact Us\" href=\"http://data.admin.ch/contact\">Contact</a></li>\n" .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin2.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin2.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin2.ttl
new file mode 100644
index 0000000..df46a1a
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/dadmin2.ttl
@@ -0,0 +1,17 @@
+@prefix rdf:	<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix ns1:	<http://data.admin.ch/> .
+@prefix ns2:	<http://schema.org/> .
+ns1:apps	rdf:type	ns2:WebPage ;
+	ns2:hasPart	_:b10511 .
+@prefix ns3:	<http://schema.zazuko.com/site/> .
+ns1:apps	ns3:hasIntro	_:b10510 ;
+	ns3:hasMenu	_:b10509 .
+_:b10509	rdf:type	ns3:Menu ;
+	ns2:articleBody	"\n  <li><a href=\"http://data.admin.ch/datasets\" title=\"Available Data\">Data</a></li>\n  <li><a href=\"http://data.admin.ch/apps\" title=\"Applications\">Applications</a></li>\n  <li><a href=\"http://data.admin.ch/sparql\" title=\"SPARQL Endpoint\">SPARQL</a></li>\n  <li><a title=\"About the Portal\" href=\"http://data.admin.ch/about\">About the Portal</a></li>\n  <li><a title=\"Contact Us\" href=\"http://data.admin.ch/contact\">Contact</a></li>\n" .
+_:b10510	rdf:type	ns3:Intro ;
+	ns2:articleBody	"\n<p><a href=\"http://data.admin.ch/map/\">Sample application</a> based on STATTAB-SDMX-01-2A01+2011 data. This visualization was made within the prototype phase in colaboration with <a href=\"http://www.ti.bfh.ch/\">Berner Fachhochschule</a>. There are currently no other STATPOP datasets available as RDF.</p>\n" ;
+	ns3:title	"Choropleth" .
+_:b10511	rdf:type	ns3:Footer ;
+	ns2:articleBody	"<p>Do you have questions about the Linked Data pilot portal? Contact us via our\n<a title=\"github\" href=\"https://github.com/zazuko/fso-lod\">Github page</a>.</p>" .
+@prefix xsd:	<http://www.w3.org/2001/XMLSchema#> .
+_:b10511	ns2:dateCreated	"2015-11-15+01:00"^^xsd:date .

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/grounded.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/grounded.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/grounded.ttl
new file mode 100644
index 0000000..ccc39c4
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/grounded.ttl
@@ -0,0 +1,16 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+@prefix rel: <http://www.perceive.net/schemas/relationship/> .
+
+<#green-goblin>
+    rel:enemyOf <#spiderman> ;
+    a foaf:Person ;    # in the context of the Marvel universe
+    foaf:name "Green Goblin" ;
+    foaf:age 128 .
+
+<#spiderman>
+    rel:enemyOf <#green-goblin> ;
+    a foaf:Person ;
+    foaf:name "Spiderman", "Человек-паук"@ru .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/similar-bnodes.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/similar-bnodes.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/similar-bnodes.ttl
new file mode 100644
index 0000000..16c1ceb
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/similar-bnodes.ttl
@@ -0,0 +1,8 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+
+
+[] foaf:knows [ foaf:name "Alice"] .
+[] foaf:knows [ foaf:name "Bob" ] .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/simple-bnode.ttl
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/simple-bnode.ttl b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/simple-bnode.ttl
new file mode 100644
index 0000000..6bcf67f
--- /dev/null
+++ b/impl.sparql/src/test/resources/org/apache/clerezza/commons/rdf/impl/sparql/simple-bnode.ttl
@@ -0,0 +1,7 @@
+@base <http://example.org/> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+
+<http://example.org/#me> foaf:knows [ a foaf:Person;
+  foaf:name "Alice Barker"].
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.utils/pom.xml
----------------------------------------------------------------------
diff --git a/impl.utils/pom.xml b/impl.utils/pom.xml
new file mode 100644
index 0000000..b217d7e
--- /dev/null
+++ b/impl.utils/pom.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    
+    <parent>
+        <groupId>org.apache.clerezza</groupId>
+        <artifactId>clerezza</artifactId>
+        <version>7</version>
+        <relativePath />
+    </parent>
+
+    <groupId>org.apache.clerezza.commons-rdf</groupId>
+    <artifactId>commons-rdf-impl-utils</artifactId>
+    <version>0.3-SNAPSHOT</version>
+    <packaging>bundle</packaging>
+    <name>Clerezza Commons RDF Implementation Utils</name>
+    <description>
+        Utility and abstract base classes easing the implementation of the
+        Clerezza Commons RDF API, which models the RDF data model as defined by
+        http://www.w3.org/TR/rdf11-concepts/
+    </description>
+
+    <url>http://clerezza.apache.org/commons-rdf/</url>
+    
+    <issueManagement>
+        <system>jira</system>
+        <url>http://issues.apache.org/jira/browse/CLEREZZA</url>
+    </issueManagement>
+    
+    <inceptionYear>2015</inceptionYear>
+    <scm>
+        <connection>scm:git:https://git-wip-us.apache.org/repos/asf/clerezza-rdf-core.git</connection>
+        <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/clerezza-rdf-core.git</developerConnection>
+        <url>https://git-wip-us.apache.org/repos/asf/clerezza-rdf-core.git</url>
+      <tag>HEAD</tag>
+  </scm>
+    
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-bundle-plugin</artifactId>
+                <extensions>true</extensions>
+                <configuration>
+                    <instructions>
+                        <Export-Package>org.apache.clerezza.commons.rdf.impl.utils.*</Export-Package>
+                    </instructions>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.clerezza.commons-rdf</groupId>
+            <artifactId>commons-rdf-api</artifactId>
+            <version>0.3-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/clerezza/blob/2109cae5/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractGraph.java
----------------------------------------------------------------------
diff --git a/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractGraph.java b/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractGraph.java
new file mode 100644
index 0000000..15efce6
--- /dev/null
+++ b/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractGraph.java
@@ -0,0 +1,316 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.clerezza.commons.rdf.impl.utils;
+
+import java.lang.ref.WeakReference;
+import java.util.AbstractCollection;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+
+import java.util.Set;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.WatchableGraph;
+import org.apache.clerezza.commons.rdf.event.AddEvent;
+import org.apache.clerezza.commons.rdf.event.FilterTriple;
+import org.apache.clerezza.commons.rdf.event.GraphEvent;
+import org.apache.clerezza.commons.rdf.event.GraphListener;
+import org.apache.clerezza.commons.rdf.event.RemoveEvent;
+import org.apache.clerezza.commons.rdf.impl.utils.debug.ReentrantReadWriteLockTracker;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleImmutableGraph;
+
+/**
+ * An abstract implementation of <code>Graph</code> implementing
+ * <code>iterator</code> and <code>contains</code> calling <code>filter</code>.
+ *
+ * @author reto
+ */
+public abstract class AbstractGraph extends AbstractCollection<Triple>
+        implements Graph {
+
+    
+    private static final String DEBUG_MODE = "rdfLocksDebugging";
+    private final ReadWriteLock lock;
+
+    private final Lock readLock;
+    private final Lock writeLock;
+
+    /**
+     * Constructs a LocalbleMGraph for an Graph.
+     *
+     * @param providedMGraph a non-lockable graph
+     */
+    public AbstractGraph() {
+        {
+            String debugMode = System.getProperty(DEBUG_MODE);
+            if (debugMode != null && debugMode.toLowerCase().equals("true")) {
+                lock = new ReentrantReadWriteLockTracker();
+            } else {
+                lock = new ReentrantReadWriteLock();
+            }
+        }
+        readLock = lock.readLock();
+        writeLock = lock.writeLock();
+    }
+    
+    public AbstractGraph(final ReadWriteLock lock) {
+        this.lock = lock;
+        readLock = lock.readLock();
+        writeLock = lock.writeLock();
+    }
+
+    @Override
+    public ReadWriteLock getLock() {
+        return lock;
+    }
+
+    @Override
+    public ImmutableGraph getImmutableGraph() {
+        readLock.lock();
+        try {
+            return performGetImmutableGraph();
+        } finally {
+            readLock.unlock();
+        }
+    }
+    
+    public ImmutableGraph performGetImmutableGraph() {
+        return new SimpleImmutableGraph(this);
+    }
+
+    @Override
+    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
+        readLock.lock();
+        try {
+            return new LockingIterator(performFilter(subject, predicate, object), lock);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public int size() {
+        readLock.lock();
+        try {
+            return performSize();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean isEmpty() {
+        readLock.lock();
+        try {
+            return performIsEmpty();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    @SuppressWarnings("element-type-mismatch")
+    public boolean contains(Object o) {
+        readLock.lock();
+        try {
+            return performContains(o);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public Iterator<Triple> iterator() {
+        readLock.lock();
+        try {
+            return new LockingIterator(performIterator(), lock);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public Object[] toArray() {
+        readLock.lock();
+        try {
+            return performToArray();
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public <T> T[] toArray(T[] a) {
+        readLock.lock();
+        try {
+            return performToArray(a);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean containsAll(Collection<?> c) {
+        readLock.lock();
+        try {
+            return performContainsAll(c);
+        } finally {
+            readLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean add(Triple e) {
+        writeLock.lock();
+        try {
+            return performAdd(e);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean remove(Object o) {
+        writeLock.lock();
+        try {
+            return performRemove(o);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean addAll(Collection<? extends Triple> c) {
+        writeLock.lock();
+        try {
+            return performAddAll(c);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean removeAll(Collection<?> c) {
+        writeLock.lock();
+        try {
+            return performRemoveAll(c);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public boolean retainAll(Collection<?> c) {
+        writeLock.lock();
+        try {
+            return performRetainAll(c);
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    @Override
+    public void clear() {
+        writeLock.lock();
+        try {
+            performClear();
+        } finally {
+            writeLock.unlock();
+        }
+    }
+
+    
+    @Override
+    public boolean equals(Object obj) {
+        /*if (obj == null) {
+            return false;
+        }
+        if (obj == this) {
+            return true;
+        }
+        if (obj.getClass() != getClass()) {
+            return false;
+        }*/
+        return this == obj;
+    }
+
+
+    protected abstract Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object);
+
+    protected abstract int performSize();
+
+    protected boolean performIsEmpty() {
+        return super.isEmpty();
+    }
+
+    protected Object[] performToArray() {
+        return super.toArray();
+    }
+
+    protected boolean performRemove(Object o) {
+        return super.remove(o);
+    }
+
+    protected boolean performAddAll(Collection<? extends Triple> c) {
+        return super.addAll(c);
+    }
+
+    protected boolean performRemoveAll(Collection<?> c) {
+        return super.removeAll(c);
+    }
+
+    protected boolean performRetainAll(Collection<?> c) {
+        return super.retainAll(c);
+    }
+
+    protected void performClear() {
+        super.clear();
+    }
+
+    protected boolean performContains(Object o) {
+        return super.contains(o);
+    }
+
+    protected Iterator<Triple> performIterator() {
+        return performFilter(null, null, null);
+    }
+
+    protected boolean performContainsAll(Collection<?> c) {
+        return super.containsAll(c);
+    }
+
+    protected <T> T[] performToArray(T[] a) {
+        return super.toArray(a);
+    }
+
+    protected boolean performAdd(Triple e) {
+        return super.add(e);
+    }
+
+ 
+}


Mime
View raw message