clerezza-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject [5/7] clerezza-rdf-core git commit: CLEREZZA-982: moved files for new packages
Date Thu, 09 Apr 2015 12:34:08 GMT
http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/api/src/main/java/org/apache/commons/rdf/package-info.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/org/apache/commons/rdf/package-info.java b/api/src/main/java/org/apache/commons/rdf/package-info.java
deleted file mode 100644
index da34f2d..0000000
--- a/api/src/main/java/org/apache/commons/rdf/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Common RDF API
- */
-package org.apache.commons.rdf;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java
new file mode 100644
index 0000000..f55f7af
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.util.Collection;
+import java.util.Objects;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.ImmutableGraph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.TripleImpl;
+import org.apache.commons.rdf.impl.utils.simple.SimpleGraph;
+
+/**
+ *
+ * @author developer
+ */
+class SparqlBNode extends BlankNode {
+    
+    final static Iri internalBNodeId = new Iri("urn:x-internalid:fdmpoihdfw");
+    
+    final ImmutableGraph context;
+    private final int isoDistinguisher;
+
+    SparqlBNode(BlankNode node, Collection<Triple> context, int isoDistinguisher) {
+        this.isoDistinguisher = isoDistinguisher;
+        final SimpleGraph contextBuider = new SimpleGraph();
+        for (Triple triple : context) {
+            BlankNodeOrIri subject = triple.getSubject();
+            RdfTerm object = triple.getObject();
+            contextBuider.add(new TripleImpl(subject.equals(node) ? internalBNodeId : subject, 
+                    triple.getPredicate(), 
+                    object.equals(node) ? internalBNodeId : object));
+        }
+        this.context = contextBuider.getImmutableGraph();
+    }
+
+    @Override
+    public int hashCode() {
+        int hash = 7+isoDistinguisher;
+        hash = 61 * hash + Objects.hashCode(this.context);
+        return hash;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        final SparqlBNode other = (SparqlBNode) obj;
+        if (isoDistinguisher != other.isoDistinguisher) {
+            return false;
+        }
+        return Objects.equals(this.context, other.context);
+    }
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java
new file mode 100644
index 0000000..e522924
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java
@@ -0,0 +1,224 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.http.HttpEntity;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
+import javax.xml.parsers.*;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Language;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.impl.utils.AbstractLiteral;
+import org.xml.sax.*;
+import org.xml.sax.helpers.*;
+
+/**
+ *
+ * @author developer
+ */
+public class SparqlClient {
+
+    final String endpoint;
+
+    public SparqlClient(final String endpoint) {
+        this.endpoint = endpoint;
+    }
+
+    List<Map<String, RdfTerm>> queryResultSet(final String query) throws IOException {
+        CloseableHttpClient httpclient = HttpClients.createDefault();
+        HttpPost httpPost = new HttpPost(endpoint);
+        List<NameValuePair> nvps = new ArrayList<NameValuePair>();
+        nvps.add(new BasicNameValuePair("query", query));
+        httpPost.setEntity(new UrlEncodedFormEntity(nvps));
+        CloseableHttpResponse response2 = httpclient.execute(httpPost);
+
+        try {
+            HttpEntity entity2 = response2.getEntity();
+            InputStream in = entity2.getContent();
+            SAXParserFactory spf = SAXParserFactory.newInstance();
+            spf.setNamespaceAware(true);
+            SAXParser saxParser = spf.newSAXParser();
+            XMLReader xmlReader = saxParser.getXMLReader();
+            final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler();
+            xmlReader.setContentHandler(sparqlsResultsHandler);
+            xmlReader.parse(new InputSource(in));
+            /*
+             for (int ch = in.read(); ch != -1; ch = in.read()) {
+             System.out.print((char)ch);
+             }
+             */
+            // do something useful with the response body
+            // and ensure it is fully consumed
+            EntityUtils.consume(entity2);
+            return sparqlsResultsHandler.getResults();
+        } catch (ParserConfigurationException ex) {
+            throw new RuntimeException(ex);
+        } catch (SAXException ex) {
+            throw new RuntimeException(ex);
+        } finally {
+            response2.close();
+        }
+
+    }
+
+    final public static class SparqlsResultsHandler extends DefaultHandler {
+
+        private String currentBindingName;
+        private Map<String, RdfTerm> currentResult = null;
+        private final List<Map<String, RdfTerm>> results = new ArrayList<>();
+        private boolean readingValue;
+        private String lang; //the xml:lang attribute of a literal
+        private String value;
+        private Map<String, BlankNode> bNodeMap = new HashMap<>();
+        private static final Iri XSD_STRING = new Iri("http://www.w3.org/2001/XMLSchema#string");
+
+        private RdfTerm getBNode(String value) {
+            if (!bNodeMap.containsKey(value)) {
+                bNodeMap.put(value, new BlankNode());
+            }
+            return bNodeMap.get(value);
+        }
+
+        private List<Map<String, RdfTerm>> getResults() {
+            return results;
+        }
+
+        enum BindingType {
+
+            uri, bnode, literal;
+        }
+
+        @Override
+        public void startDocument() throws SAXException {
+
+        }
+
+        @Override
+        public void startElement(String namespaceURI,
+                String localName,
+                String qName,
+                Attributes atts)
+                throws SAXException {
+            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
+                if ("result".equals(localName)) {
+                    if (currentResult != null) {
+                        throw new SAXException("unexpected tag <result>");
+                    }
+                    currentResult = new HashMap<>();
+                } else if ("binding".equals(localName)) {
+                    if (currentResult == null) {
+                        throw new SAXException("unexpected tag <binding>");
+                    }
+                    currentBindingName = atts.getValue("name");
+                } else if ("uri".equals(localName) || "bnode".equals(localName) || "literal".equals(localName)) {
+                    if (readingValue) {
+                        throw new SAXException("unexpected tag <" + localName + ">");
+                    }
+                    lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang");
+                    readingValue = true;
+                }
+            }
+
+            //System.out.println(namespaceURI);
+            //System.out.println(qName);
+        }
+
+        @Override
+        public void characters(char[] chars, int start, int length) throws SAXException {
+            if (readingValue) {
+                value = new String(chars, start, length);
+                //System.err.println(value + start + ", " + length);
+            }
+        }
+
+        @Override
+        public void endElement(String namespaceURI,
+                String localName,
+                String qName)
+                throws SAXException {
+            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
+                if ("result".equals(localName)) {
+                    results.add(currentResult);
+                    currentResult = null;
+                } else if ("binding".equals(localName)) {
+                    if (currentBindingName == null) {
+                        throw new SAXException("unexpected tag </binding>");
+                    }
+                    currentBindingName = null;
+                } else {
+                    try {
+                        BindingType b = BindingType.valueOf(localName);
+                        RdfTerm rdfTerm = null;
+                        final Language language = lang == null? null : new Language(lang);;
+                        switch (b) {
+                            case uri:
+                                rdfTerm = new Iri(value);
+                                break;
+                            case bnode:
+                                rdfTerm = getBNode(value);
+                                break;
+                            case literal:
+                                final String lf = value;
+                                rdfTerm = new AbstractLiteral() {
+
+                                    @Override
+                                    public String getLexicalForm() {
+                                        return lf;
+                                    }
+
+                                    @Override
+                                    public Iri getDataType() {
+                                        //TODO implement
+                                        return XSD_STRING;
+                                    }
+
+                                    @Override
+                                    public Language getLanguage() {
+                                        return language;
+                                    }
+                                };
+                                break;
+                        }
+                        currentResult.put(currentBindingName, rdfTerm);
+                        readingValue = false;
+                    } catch (IllegalArgumentException e) {
+                            //not uri|bnode|literal
+                    }
+                }
+            }
+        }
+
+        public void endDocument() throws SAXException {
+            //System.out.println("results: " + results.size());
+        }
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
new file mode 100644
index 0000000..594a264
--- /dev/null
+++ b/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
@@ -0,0 +1,505 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.logging.Logger;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.ImmutableGraph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Literal;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.apache.commons.rdf.impl.utils.AbstractGraph;
+import org.apache.commons.rdf.impl.utils.TripleImpl;
+import org.apache.commons.rdf.impl.utils.simple.SimpleGraph;
+
+/**
+ *
+ * @author reto
+ */
+public class SparqlGraph extends AbstractGraph {
+
+    private static final int MAX_ISOMORPHIC_BNODES = 1000;
+    private static final Logger log = Logger.getLogger(SparqlGraph.class.getName());
+
+    final SparqlClient sparqlClient;
+
    /**
     * Constructs a Graph representing the default graph at the specified
     * endpoint
     *
     * @param endpoint the URI of the SPARQL endpoint to query
     */
    public SparqlGraph(final String endpoint) {
        sparqlClient = new SparqlClient(endpoint);
    }
+
    /**
     * Returns an iterator over the triples matching the given pattern, where
     * a {@code null} argument acts as a wildcard.
     *
     * Blank nodes in the raw result set are only meaningful within a single
     * response, so each one is mapped to a {@link SparqlBNode} identified by
     * its surrounding context graph before being handed to the caller.
     */
    @Override
    protected Iterator<Triple> performFilter(final BlankNodeOrIri filterSubject,
            final Iri filterPredicate, final RdfTerm filterObject) {
        try {
            String query = createQuery(filterSubject, filterPredicate, filterObject);
            final List<Map<String, RdfTerm>> sparqlResults = sparqlClient.queryResultSet(query);
            //first to triples without bnode-conversion
            //rawTriples contains the triples with the BNodes from the result set
            final Collection<Triple> rawTriples = new ArrayList<>();
            for (Map<String, RdfTerm> result : sparqlResults) {
                //a position that was filtered on is not in the result set,
                //so reuse the filter term for it
                rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIri) result.get("s"),
                        filterPredicate != null ? filterPredicate : (Iri) result.get("p"),
                        filterObject != null ? filterObject : result.get("o")));

            }
            //then bnode conversion
            final Iterator<Triple> rawTriplesIter = rawTriples.iterator();
            //this is basically just working around the lack of (named) nested functions
            return (new Callable<Iterator<Triple>>() {

                //maps raw result-set BNodes to their context-identified replacements
                final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>();
                //contexts already assigned to some node, so two different raw
                //nodes never get the same context
                final Set<ImmutableGraph> usedContext = new HashSet<>();

                private RdfTerm useSparqlNode(RdfTerm node) throws IOException {
                    if (node instanceof BlankNodeOrIri) {
                        return useSparqlNode((BlankNodeOrIri) node);
                    }
                    return node;
                }

                //returns the SparqlBNode replacing a raw BNode, creating the
                //mapping on demand; non-BNodes pass through unchanged
                private BlankNodeOrIri useSparqlNode(BlankNodeOrIri node) throws IOException {
                    if (node instanceof BlankNode) {
                        if (!nodeMap.containsKey(node)) {
                            createBlankNodesForcontext((BlankNode) node);
                        }
                        if (!nodeMap.containsKey(node)) {
                            throw new RuntimeException("no Bnode created");
                        }
                        return nodeMap.get(node);
                    } else {
                        return node;
                    }
                }

                //creates SparqlBNodes for every raw BNode found in the context
                //of node, increasing the isoDistinguisher until each node in
                //the context gets a distinct SparqlBNode
                private void createBlankNodesForcontext(final BlankNode node) throws IOException {
                    final Collection<Triple> context = getContext(node);
                    final Set<BlankNode> rawNodes = new HashSet<>();
                    for (Triple triple : context) {
                        {
                            final BlankNodeOrIri subject = triple.getSubject();
                            if (subject instanceof BlankNode) {
                                rawNodes.add((BlankNode) subject);
                            }
                        }
                        {
                            final RdfTerm object = triple.getObject();
                            if (object instanceof BlankNode) {
                                rawNodes.add((BlankNode) object);
                            }
                        }
                    }
                    final Set<SparqlBNode> createdSparqlNodes = new HashSet<>();
                    //final Map<BlankNode, SparqlBNode> preliminaryNodes = new HashMap<>();
                    for (BlankNode rawNode : rawNodes) {
                        for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) {
                            SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i);
                            if (!createdSparqlNodes.contains(sparqlBNode)) {
                                nodeMap.put(rawNode, sparqlBNode);
                                createdSparqlNodes.add(sparqlBNode);
                                break;
                            }
                        }
                    }
                }

                private ImmutableGraph getContext(final BlankNode node) throws IOException {
                    //we need to get the context of the BNode
                    //if the filter was for (null, null, null) we have the whole
                    //bnode context in the result set, otherwise we need to get
                    //more triples from the endpoint,
                    //let's first handle the easy case
                    if ((filterSubject == null) && (filterPredicate == null)
                            && (filterObject == null)) {
                        return getContextInRaw(node);
                    } else {
                        final ImmutableGraph startContext = getContextInRaw(node);
                        final Set<ImmutableGraph> expandedContexts = expandContext(startContext);
                        //expand bnode context
                        //note that there might be different contexts for
                        //a bnode as present in the current result set
                        //in this case we just have to make sure we don't
                        //pick the same context for different bnodes in the resultset
                        ImmutableGraph result = null;
                        for (ImmutableGraph expandedContext : expandedContexts) {
                            if (!usedContext.contains(expandedContext)) {
                                result = expandedContext;
                                break;
                            }
                        }
                        if (result == null) {
                            log.warning("he underlying sparql graph seems to contain redundant triples, this might cause unexpected results");
                            result = expandedContexts.iterator().next();
                        } else {
                            usedContext.add(result);
                        }
                        return result;
                    }

                }

                //collects the triples of the current raw result set that
                //mention node as subject or object
                private ImmutableGraph getContextInRaw(BlankNode node) {
                    final Graph contextBuilder = new SimpleGraph();
                    for (Triple rawTriple : rawTriples) {
                        BlankNodeOrIri rawSubject = rawTriple.getSubject();
                        RdfTerm rawObject = rawTriple.getObject();
                        if (rawSubject.equals(node) || rawObject.equals(node)) {
                            contextBuilder.add(rawTriple);
                        }
                    }
                    return contextBuilder.getImmutableGraph();
                }

                @Override
                public Iterator<Triple> call() throws Exception {
                    //lazy view over rawTriples converting BNodes on access
                    return new Iterator<Triple>() {

                        @Override
                        public boolean hasNext() {
                            return rawTriplesIter.hasNext();
                        }

                        @Override
                        public Triple next() {
                            try {
                                Triple rawTriple = rawTriplesIter.next();
                                return new TripleImpl(useSparqlNode(rawTriple.getSubject()),
                                        rawTriple.getPredicate(),
                                        useSparqlNode(rawTriple.getObject()));
                            } catch (IOException ex) {
                                throw new RuntimeException(ex);
                            }
                        }
                    };
                }

                /**
                 * returns all MSGs that are supergraphs of startContext
                 *
                 * @param startContext the context to expand
                 * @return the expanded contexts, never empty
                 */
                private Set<ImmutableGraph> expandContext(Collection<Triple> startContext) throws IOException {

                    //build a query matching startContext plus, per bnode, one
                    //optional outgoing (?po/?o) and one incoming (?s/?pi) edge
                    final StringBuilder queryBuilder = new StringBuilder();
                    queryBuilder.append("SELECT * WHERE {\n ");
                    Map<BlankNode, String> bNodeVarNameMap = writeTriplePattern(queryBuilder, startContext);
                    Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet();
                    for (BlankNode bNode : bNodesInContext) {
                        final String bNodeVarLabel = bNodeVarNameMap.get(bNode);
                        //looking for outgoing properties of the bnode
                        queryBuilder.append("OPTIONAL { ");
                        queryBuilder.append('?');
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(' ');
                        queryBuilder.append("?po");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" ?o");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" } .\n");
                        //looking for incoming properties of the bnode
                        queryBuilder.append("OPTIONAL { ");
                        queryBuilder.append("?s");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(' ');
                        queryBuilder.append("?pi");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" ?");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" } .\n");
                    }
                    queryBuilder.append(" }");
                    final List<Map<String, RdfTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString());
                    Set<ImmutableGraph> expandedContexts = new HashSet<>();
                    //the query results may or may not be from disjoint supergraphs
                    //we expand them all as if they are different which may lead
                    //us to the same MSG multiple times
                    RESULTS:
                    for (Map<String, RdfTerm> expansionQueryResult : expansionQueryResults) {
                        Collection<Triple> expandedContext = new HashSet<>();
                        Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>();
                        for (BlankNode oldBNode : bNodesInContext) {
                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
                            final RdfTerm newNode = expansionQueryResult.get(bNodeVarLabel);
                            if (!(newNode instanceof BlankNode)) {
                                //this subgraph isn't a match
                                continue RESULTS;
                            }
                            newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode);
                        }
                        expandedContext.addAll(startContext);
                        boolean newBNodeIntroduced = false;
                        boolean newTripleAdded = false;
                        for (BlankNode oldBNode : bNodesInContext) {
                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
                            {
                                final Iri newPredicate = (Iri) expansionQueryResult.get("po" + bNodeVarLabel);
                                if (newPredicate != null) {
                                    RdfTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel);
                                    if (newObject instanceof BlankNode) {
                                        if (newBNodesToOldBNodes.containsKey(newObject)) {
                                            //point back to BNode in startContext
                                            newObject = newBNodesToOldBNodes.get(newObject);
                                        } else {
                                            newBNodeIntroduced = true;
                                        }
                                    }
                                    if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) {
                                        newTripleAdded = true;
                                    }
                                }
                            }
                            {
                                final Iri newPredicate = (Iri) expansionQueryResult.get("pi" + bNodeVarLabel);
                                if (newPredicate != null) {
                                    RdfTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel);
                                    if (newSubject instanceof BlankNode) {
                                        if (newBNodesToOldBNodes.containsKey(newSubject)) {
                                            //point back to BNode in startContext
                                            newSubject = newBNodesToOldBNodes.get(newSubject);
                                        } else {
                                            newBNodeIntroduced = true;
                                        }
                                    }
                                    if (expandedContext.add(new TripleImpl((BlankNodeOrIri) newSubject, newPredicate, oldBNode))) {
                                        newTripleAdded = true;
                                    }
                                }
                            }
                        }
                        if (newBNodeIntroduced) {
                            //we could be more efficient than this and just expand the newly introduced bnodes
                            expandedContexts.addAll(expandContext(expandedContext));
                        } else {
                            if (newTripleAdded) {
                                //look for more results
                                expandedContexts.addAll(expandContext(expandedContext));
                                //expandedContexts.add(expandedContext);
                            }
                        }

                    }
                    if (expandedContexts.isEmpty()) {
                        //nothing grew: startContext itself is the MSG
                        expandedContexts.add(new SimpleGraph(startContext).getImmutableGraph());
                    }
                    return expandedContexts;
                }

            }).call();
        } catch (AlienBNodeException e) {
            //NOTE(review): presumably thrown for a filter BNode that did not
            //originate from this graph, so nothing can match — confirm against
            //the (not shown here) AlienBNodeException declaration
            return new Iterator<Triple>() {

                @Override
                public boolean hasNext() {
                    return false;
                }

                @Override
                public Triple next() {
                    throw new NoSuchElementException();
                }
            };
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
+
+    /**
+     * Builds a SPARQL SELECT query string matching the given triple filter.
+     * A {@code null} argument becomes a variable ({@code ?s}, {@code ?p} or
+     * {@code ?o}) that is also projected in the SELECT clause; a
+     * {@link SparqlBNode} argument becomes a variable ({@code ?sn} / {@code ?on})
+     * constrained by the triple pattern encoding that bnode's context.
+     *
+     * NOTE(review): if all three arguments are non-null nothing is projected,
+     * yielding "SELECT WHERE {...}" — presumably callers always pass at least
+     * one null, or the endpoint tolerates this; verify.
+     *
+     * @param filterSubject subject to match, or null for any subject
+     * @param filterPredicate predicate to match, or null for any predicate
+     * @param filterObject object to match, or null for any object
+     * @return the SPARQL query string
+     */
+    private String createQuery(final BlankNodeOrIri filterSubject, final Iri filterPredicate, final RdfTerm filterObject) {
+        final StringBuilder selectBuilder = new StringBuilder();
+        selectBuilder.append("SELECT ");
+        final StringBuilder whereBuilder = new StringBuilder();
+        whereBuilder.append("WHERE { ");
+        if (filterSubject == null) {
+            whereBuilder.append("?s");
+            selectBuilder.append("?s ");
+        } else {
+            if (filterSubject instanceof SparqlBNode) {
+                //bnodes cannot be addressed directly; use a variable bound below
+                whereBuilder.append("?sn");
+            } else {
+                whereBuilder.append(asSparqlTerm(filterSubject));
+            }
+        }
+        whereBuilder.append(' ');
+        if (filterPredicate == null) {
+            whereBuilder.append("?p");
+            selectBuilder.append("?p ");
+        } else {
+            whereBuilder.append(asSparqlTerm(filterPredicate));
+        }
+        whereBuilder.append(' ');
+        if (filterObject == null) {
+            whereBuilder.append("?o");
+            selectBuilder.append("?o ");
+        } else {
+            if (filterObject instanceof SparqlBNode) {
+                //bnodes cannot be addressed directly; use a variable bound below
+                whereBuilder.append("?on");
+            } else {
+                whereBuilder.append(asSparqlTerm(filterObject));
+            }
+        }
+        whereBuilder.append(" .\n");
+        if (filterSubject instanceof SparqlBNode) {
+            //constrain ?sn with the triple pattern of the bnode's context
+            writeTriplePattern(whereBuilder, ((SparqlBNode) filterSubject).context, "sn");
+        }
+        
+        if (filterObject instanceof SparqlBNode) {
+            //constrain ?on with the triple pattern of the bnode's context
+            writeTriplePattern(whereBuilder, ((SparqlBNode) filterObject).context, "on");
+        }
+
+        whereBuilder.append(" }");
+        return selectBuilder.append(whereBuilder).toString();
+    }
+
+    /**
+     * Returns the number of triples in the remote graph.
+     *
+     * NOTE(review): this retrieves the entire result set just to count it;
+     * a {@code SELECT (COUNT(*) ...)} query would avoid transferring all
+     * triples — consider if endpoints in use support it.
+     */
+    @Override
+    protected int performSize() {
+        try {
+            return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size();
+        } catch (IOException ex) {
+            //propagate as unchecked since the overridden signature declares no IOException
+            throw new RuntimeException(ex);
+        }
+    }
+
+    /**
+     * Serializes an IRI in SPARQL angle-bracket syntax, e.g. {@code <http://...>}.
+     * NOTE(review): no escaping of characters illegal in IRIREF — assumes
+     * well-formed IRIs; verify against possible inputs.
+     */
+    private String asSparqlTerm(Iri iri) {
+        return "<" + iri.getUnicodeString() + ">";
+    }
+
+    /**
+     * Serializes a literal as a plain quoted SPARQL string.
+     * NOTE(review): quotes/backslashes in the lexical form are not escaped,
+     * which would produce an invalid query — verify inputs or add escaping.
+     */
+    private String asSparqlTerm(Literal literal) {
+        //TODO language and datatype
+        return "\"" + literal.getLexicalForm() + "\"";
+    }
+
+    /**
+     * Always throws: blank nodes cannot be serialized as a single SPARQL term.
+     * A foreign (non-{@link SparqlBNode}) node raises {@link AlienBNodeException};
+     * a {@link SparqlBNode} reaching this point is a programming error, since
+     * callers must have rewritten it into a variable plus context pattern earlier.
+     */
+    private String asSparqlTerm(BlankNode bnode) {
+        if (!(bnode instanceof SparqlBNode)) {
+            throw new AlienBNodeException();
+        }
+        //this requires adding additional clauses to the graph pattern
+        throw new RuntimeException("SparqlBNodes should have been handled earlier");
+    }
+
+    /**
+     * Dispatches to the {@link Iri} or {@link BlankNode} serialization
+     * depending on the runtime type of the term.
+     */
+    private String asSparqlTerm(BlankNodeOrIri term) {
+        if (term instanceof Iri) {
+            return asSparqlTerm((Iri) term);
+        } else {
+            return asSparqlTerm((BlankNode) term);
+        }
+    }
+
+    /**
+     * Dispatches an arbitrary RDF term to the appropriate serialization:
+     * {@link BlankNodeOrIri} handling, or {@link Literal} otherwise.
+     */
+    private String asSparqlTerm(RdfTerm term) {
+        if (term instanceof BlankNodeOrIri) {
+            return asSparqlTerm((BlankNodeOrIri) term);
+        } else {
+            return asSparqlTerm((Literal) term);
+        }
+    }
+
+
+    /**
+     * Convenience overload: writes the triple pattern without a variable
+     * label for {@link SparqlBNode#internalBNodeId} placeholders.
+     */
+    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples) {
+        return writeTriplePattern(queryBuilder, triples, null);
+    }
+        
+    /**
+     * Appends the given triples to {@code queryBuilder} as a SPARQL basic
+     * graph pattern, one pattern line per triple.
+     *
+     * Blank nodes are rendered as fresh variables {@code ?v0}, {@code ?v1}, ...
+     * (the same variable is reused for repeated occurrences of the same node);
+     * occurrences of the {@link SparqlBNode#internalBNodeId} placeholder IRI
+     * are rendered as {@code ?varLabelForInternalBNodeId}; all other terms are
+     * serialized via {@code asSparqlTerm}.
+     *
+     * NOTE(review): if a triple contains the placeholder IRI while
+     * {@code varLabelForInternalBNodeId} is null, the literal text "?null" is
+     * emitted — presumably callers only pass placeholder-bearing contexts with
+     * a non-null label; verify.
+     *
+     * @param queryBuilder receives the pattern text
+     * @param triples the triples to serialize
+     * @param varLabelForInternalBNodeId variable name (without '?') substituted
+     *        for the internal bnode placeholder, or null if no placeholder occurs
+     * @return mapping from each encountered BlankNode to its variable name
+     */
+    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples, String varLabelForInternalBNodeId) {
+        final Collection<String> triplePatterns = new ArrayList<>();
+        int varCounter = 0;
+        final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>();
+        for (Triple t : triples) {
+            final StringBuilder builder = new StringBuilder();
+            {
+                final BlankNodeOrIri s = t.getSubject();
+                String varName;
+                if (s instanceof BlankNode) {
+                    //reuse the variable already assigned to this blank node, if any
+                    if (bNodeVarNameMap.containsKey(s)) {
+                        varName = bNodeVarNameMap.get(s);
+                    } else {
+                        varName = "v" + (varCounter++);
+                        bNodeVarNameMap.put((BlankNode) s, varName);
+                    }
+                    builder.append('?');
+                    builder.append(varName);
+                } else {
+                    //the placeholder IRI marks the bnode this context describes
+                    if (s.equals(SparqlBNode.internalBNodeId)) {
+                        builder.append('?');
+                        builder.append(varLabelForInternalBNodeId);
+                    } else {
+                        builder.append(asSparqlTerm(s));
+                    }
+                    
+                }
+            }
+            builder.append(' ');
+            builder.append(asSparqlTerm(t.getPredicate()));
+            builder.append(' ');
+            {
+                final RdfTerm o = t.getObject();
+                String varName;
+                if (o instanceof BlankNode) {
+                    //reuse the variable already assigned to this blank node, if any
+                    if (bNodeVarNameMap.containsKey(o)) {
+                        varName = bNodeVarNameMap.get(o);
+                    } else {
+                        varName = "v" + (varCounter++);
+                        bNodeVarNameMap.put((BlankNode) o, varName);
+                    }
+                    builder.append('?');
+                    builder.append(varName);
+                } else {
+                    //the placeholder IRI marks the bnode this context describes
+                    if (o.equals(SparqlBNode.internalBNodeId)) {
+                        builder.append('?');
+                        builder.append(varLabelForInternalBNodeId);
+                    } else {
+                        builder.append(asSparqlTerm(o));
+                    }
+                }
+            }
+            builder.append('.');
+            triplePatterns.add(builder.toString());
+
+        }
+        for (String triplePattern : triplePatterns) {
+
+            queryBuilder.append(triplePattern);
+            queryBuilder.append('\n');
+        }
+        return bNodeVarNameMap;
+
+    }
+
+    /**
+     * Signals that a {@link BlankNode} not originating from this graph
+     * (i.e. not a {@link SparqlBNode}) was used in a filter; the caller
+     * maps this to an empty result iterator.
+     */
+    private static class AlienBNodeException extends RuntimeException {
+
+        public AlienBNodeException() {
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
deleted file mode 100644
index f55f7af..0000000
--- a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlBNode.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2015 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.rdf.impl.sparql;
-
-import java.util.Collection;
-import java.util.Objects;
-import org.apache.commons.rdf.BlankNode;
-import org.apache.commons.rdf.BlankNodeOrIri;
-import org.apache.commons.rdf.ImmutableGraph;
-import org.apache.commons.rdf.Iri;
-import org.apache.commons.rdf.RdfTerm;
-import org.apache.commons.rdf.Triple;
-import org.apache.commons.rdf.impl.utils.TripleImpl;
-import org.apache.commons.rdf.impl.utils.simple.SimpleGraph;
-
-/**
- *
- * @author developer
- */
-class SparqlBNode extends BlankNode {
-    
-    final static Iri internalBNodeId = new Iri("urn:x-internalid:fdmpoihdfw");
-    
-    final ImmutableGraph context;
-    private final int isoDistinguisher;
-
-    SparqlBNode(BlankNode node, Collection<Triple> context, int isoDistinguisher) {
-        this.isoDistinguisher = isoDistinguisher;
-        final SimpleGraph contextBuider = new SimpleGraph();
-        for (Triple triple : context) {
-            BlankNodeOrIri subject = triple.getSubject();
-            RdfTerm object = triple.getObject();
-            contextBuider.add(new TripleImpl(subject.equals(node) ? internalBNodeId : subject, 
-                    triple.getPredicate(), 
-                    object.equals(node) ? internalBNodeId : object));
-        }
-        this.context = contextBuider.getImmutableGraph();
-    }
-
-    @Override
-    public int hashCode() {
-        int hash = 7+isoDistinguisher;
-        hash = 61 * hash + Objects.hashCode(this.context);
-        return hash;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        final SparqlBNode other = (SparqlBNode) obj;
-        if (isoDistinguisher != other.isoDistinguisher) {
-            return false;
-        }
-        return Objects.equals(this.context, other.context);
-    }
-}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
deleted file mode 100644
index e522924..0000000
--- a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlClient.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright 2015 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.rdf.impl.sparql;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.http.HttpEntity;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.entity.UrlEncodedFormEntity;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.message.BasicNameValuePair;
-import org.apache.http.util.EntityUtils;
-import javax.xml.parsers.*;
-import org.apache.commons.rdf.BlankNode;
-import org.apache.commons.rdf.Iri;
-import org.apache.commons.rdf.Language;
-import org.apache.commons.rdf.RdfTerm;
-import org.apache.commons.rdf.impl.utils.AbstractLiteral;
-import org.xml.sax.*;
-import org.xml.sax.helpers.*;
-
-/**
- *
- * @author developer
- */
-public class SparqlClient {
-
-    final String endpoint;
-
-    public SparqlClient(final String endpoint) {
-        this.endpoint = endpoint;
-    }
-
-    List<Map<String, RdfTerm>> queryResultSet(final String query) throws IOException {
-        CloseableHttpClient httpclient = HttpClients.createDefault();
-        HttpPost httpPost = new HttpPost(endpoint);
-        List<NameValuePair> nvps = new ArrayList<NameValuePair>();
-        nvps.add(new BasicNameValuePair("query", query));
-        httpPost.setEntity(new UrlEncodedFormEntity(nvps));
-        CloseableHttpResponse response2 = httpclient.execute(httpPost);
-
-        try {
-            HttpEntity entity2 = response2.getEntity();
-            InputStream in = entity2.getContent();
-            SAXParserFactory spf = SAXParserFactory.newInstance();
-            spf.setNamespaceAware(true);
-            SAXParser saxParser = spf.newSAXParser();
-            XMLReader xmlReader = saxParser.getXMLReader();
-            final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler();
-            xmlReader.setContentHandler(sparqlsResultsHandler);
-            xmlReader.parse(new InputSource(in));
-            /*
-             for (int ch = in.read(); ch != -1; ch = in.read()) {
-             System.out.print((char)ch);
-             }
-             */
-            // do something useful with the response body
-            // and ensure it is fully consumed
-            EntityUtils.consume(entity2);
-            return sparqlsResultsHandler.getResults();
-        } catch (ParserConfigurationException ex) {
-            throw new RuntimeException(ex);
-        } catch (SAXException ex) {
-            throw new RuntimeException(ex);
-        } finally {
-            response2.close();
-        }
-
-    }
-
-    final public static class SparqlsResultsHandler extends DefaultHandler {
-
-        private String currentBindingName;
-        private Map<String, RdfTerm> currentResult = null;
-        private final List<Map<String, RdfTerm>> results = new ArrayList<>();
-        private boolean readingValue;
-        private String lang; //the xml:lang attribute of a literal
-        private String value;
-        private Map<String, BlankNode> bNodeMap = new HashMap<>();
-        private static final Iri XSD_STRING = new Iri("http://www.w3.org/2001/XMLSchema#string");
-
-        private RdfTerm getBNode(String value) {
-            if (!bNodeMap.containsKey(value)) {
-                bNodeMap.put(value, new BlankNode());
-            }
-            return bNodeMap.get(value);
-        }
-
-        private List<Map<String, RdfTerm>> getResults() {
-            return results;
-        }
-
-        enum BindingType {
-
-            uri, bnode, literal;
-        }
-
-        @Override
-        public void startDocument() throws SAXException {
-
-        }
-
-        @Override
-        public void startElement(String namespaceURI,
-                String localName,
-                String qName,
-                Attributes atts)
-                throws SAXException {
-            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
-                if ("result".equals(localName)) {
-                    if (currentResult != null) {
-                        throw new SAXException("unexpected tag <result>");
-                    }
-                    currentResult = new HashMap<>();
-                } else if ("binding".equals(localName)) {
-                    if (currentResult == null) {
-                        throw new SAXException("unexpected tag <binding>");
-                    }
-                    currentBindingName = atts.getValue("name");
-                } else if ("uri".equals(localName) || "bnode".equals(localName) || "literal".equals(localName)) {
-                    if (readingValue) {
-                        throw new SAXException("unexpected tag <" + localName + ">");
-                    }
-                    lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang");
-                    readingValue = true;
-                }
-            }
-
-            //System.out.println(namespaceURI);
-            //System.out.println(qName);
-        }
-
-        @Override
-        public void characters(char[] chars, int start, int length) throws SAXException {
-            if (readingValue) {
-                value = new String(chars, start, length);
-                //System.err.println(value + start + ", " + length);
-            }
-        }
-
-        @Override
-        public void endElement(String namespaceURI,
-                String localName,
-                String qName)
-                throws SAXException {
-            if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
-                if ("result".equals(localName)) {
-                    results.add(currentResult);
-                    currentResult = null;
-                } else if ("binding".equals(localName)) {
-                    if (currentBindingName == null) {
-                        throw new SAXException("unexpected tag </binding>");
-                    }
-                    currentBindingName = null;
-                } else {
-                    try {
-                        BindingType b = BindingType.valueOf(localName);
-                        RdfTerm rdfTerm = null;
-                        final Language language = lang == null? null : new Language(lang);;
-                        switch (b) {
-                            case uri:
-                                rdfTerm = new Iri(value);
-                                break;
-                            case bnode:
-                                rdfTerm = getBNode(value);
-                                break;
-                            case literal:
-                                final String lf = value;
-                                rdfTerm = new AbstractLiteral() {
-
-                                    @Override
-                                    public String getLexicalForm() {
-                                        return lf;
-                                    }
-
-                                    @Override
-                                    public Iri getDataType() {
-                                        //TODO implement
-                                        return XSD_STRING;
-                                    }
-
-                                    @Override
-                                    public Language getLanguage() {
-                                        return language;
-                                    }
-                                };
-                                break;
-                        }
-                        currentResult.put(currentBindingName, rdfTerm);
-                        readingValue = false;
-                    } catch (IllegalArgumentException e) {
-                            //not uri|bnode|literal
-                    }
-                }
-            }
-        }
-
-        public void endDocument() throws SAXException {
-            //System.out.println("results: " + results.size());
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java b/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
deleted file mode 100644
index 594a264..0000000
--- a/impl.sparql/src/main/java/org/apache/commons/rdf/impl/sparql/SparqlGraph.java
+++ /dev/null
@@ -1,505 +0,0 @@
-/*
- * Copyright 2015 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.commons.rdf.impl.sparql;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.logging.Logger;
-import org.apache.commons.rdf.BlankNode;
-import org.apache.commons.rdf.BlankNodeOrIri;
-import org.apache.commons.rdf.Graph;
-import org.apache.commons.rdf.ImmutableGraph;
-import org.apache.commons.rdf.Iri;
-import org.apache.commons.rdf.Literal;
-import org.apache.commons.rdf.RdfTerm;
-import org.apache.commons.rdf.Triple;
-import org.apache.commons.rdf.impl.utils.AbstractGraph;
-import org.apache.commons.rdf.impl.utils.TripleImpl;
-import org.apache.commons.rdf.impl.utils.simple.SimpleGraph;
-
-/**
- *
- * @author reto
- */
-public class SparqlGraph extends AbstractGraph {
-
-    private static final int MAX_ISOMORPHIC_BNODES = 1000;
-    private static final Logger log = Logger.getLogger(SparqlGraph.class.getName());
-
-    final SparqlClient sparqlClient;
-
-    /**
-     * Constructs a Graph representing the default graph at the specified
-     * endpoint
-     */
-    public SparqlGraph(final String endpoint) {
-        sparqlClient = new SparqlClient(endpoint);
-    }
-
-    @Override
-    protected Iterator<Triple> performFilter(final BlankNodeOrIri filterSubject,
-            final Iri filterPredicate, final RdfTerm filterObject) {
-        try {
-            String query = createQuery(filterSubject, filterPredicate, filterObject);
-            final List<Map<String, RdfTerm>> sparqlResults = sparqlClient.queryResultSet(query);
-            //first to triples without bnode-conversion
-            //rawTriples contains the triples with the BNodes from the result set
-            final Collection<Triple> rawTriples = new ArrayList<>();
-            for (Map<String, RdfTerm> result : sparqlResults) {
-                rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIri) result.get("s"),
-                        filterPredicate != null ? filterPredicate : (Iri) result.get("p"),
-                        filterObject != null ? filterObject : result.get("o")));
-
-            }
-            //then bnode conversion
-            final Iterator<Triple> rawTriplesIter = rawTriples.iterator();
-            //this is basically just wokring around the lack of (named) nested functions
-            return (new Callable<Iterator<Triple>>() {
-
-                final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>();
-                final Set<ImmutableGraph> usedContext = new HashSet<>();
-
-                private RdfTerm useSparqlNode(RdfTerm node) throws IOException {
-                    if (node instanceof BlankNodeOrIri) {
-                        return useSparqlNode((BlankNodeOrIri) node);
-                    }
-                    return node;
-                }
-
-                private BlankNodeOrIri useSparqlNode(BlankNodeOrIri node) throws IOException {
-                    if (node instanceof BlankNode) {
-                        if (!nodeMap.containsKey(node)) {
-                            createBlankNodesForcontext((BlankNode) node);
-                        }
-                        if (!nodeMap.containsKey(node)) {
-                            throw new RuntimeException("no Bnode created");
-                        }
-                        return nodeMap.get(node);
-                    } else {
-                        return node;
-                    }
-                }
-
-                private void createBlankNodesForcontext(final BlankNode node) throws IOException {
-                    final Collection<Triple> context = getContext(node);
-                    final Set<BlankNode> rawNodes = new HashSet<>();
-                    for (Triple triple : context) {
-                        {
-                            final BlankNodeOrIri subject = triple.getSubject();
-                            if (subject instanceof BlankNode) {
-                                rawNodes.add((BlankNode) subject);
-                            }
-                        }
-                        {
-                            final RdfTerm object = triple.getObject();
-                            if (object instanceof BlankNode) {
-                                rawNodes.add((BlankNode) object);
-                            }
-                        }
-                    }
-                    final Set<SparqlBNode> createdSparqlNodes = new HashSet<>();
-                    //final Map<BlankNode, SparqlBNode> preliminaryNodes = new HashMap<>();
-                    for (BlankNode rawNode : rawNodes) {
-                        for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) {
-                            SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i);
-                            if (!createdSparqlNodes.contains(sparqlBNode)) {
-                                nodeMap.put(rawNode, sparqlBNode);
-                                createdSparqlNodes.add(sparqlBNode);
-                                break;
-                            }
-                        }
-                    }
-                }
-
-                private ImmutableGraph getContext(final BlankNode node) throws IOException {
-                    //we need to get the cntext of the BNode
-                    //if the filter was for (null, null, null) we have the whole
-                    //bnode context in the reuslt set, otherwise we need to get 
-                    //more triples from the endpoint,
-                    //let's first handle the easy case
-                    if ((filterSubject == null) && (filterPredicate == null)
-                            && (filterObject == null)) {
-                        return getContextInRaw(node);
-                    } else {
-                        final ImmutableGraph startContext = getContextInRaw(node);
-                        final Set<ImmutableGraph> expandedContexts = expandContext(startContext);
-                        //expand bnode context
-                        //note that there might be different contexts for 
-                        //a bnode as present in the current result set
-                        //in this case we just haveto make sure we don't 
-                        //pick the same context for different bnodes in the resultset
-                        ImmutableGraph result = null;
-                        for (ImmutableGraph expandedContext : expandedContexts) {
-                            if (!usedContext.contains(expandedContext)) {
-                                result = expandedContext;
-                                break;
-                            }
-                        }
-                        if (result == null) {
-                            log.warning("he underlying sparql graph seems to contain redundant triples, this might cause unexpected results");
-                            result = expandedContexts.iterator().next();
-                        } else {
-                            usedContext.add(result);
-                        }
-                        return result;
-                    }
-
-                }
-
-                private ImmutableGraph getContextInRaw(BlankNode node) {
-                    final Graph contextBuilder = new SimpleGraph();
-                    for (Triple rawTriple : rawTriples) {
-                        BlankNodeOrIri rawSubject = rawTriple.getSubject();
-                        RdfTerm rawObject = rawTriple.getObject();
-                        if (rawSubject.equals(node) || rawObject.equals(node)) {
-                            contextBuilder.add(rawTriple);
-                        }
-                    }
-                    return contextBuilder.getImmutableGraph();
-                }
-
-                @Override
-                public Iterator<Triple> call() throws Exception {
-                    return new Iterator<Triple>() {
-
-                        @Override
-                        public boolean hasNext() {
-                            return rawTriplesIter.hasNext();
-                        }
-
-                        @Override
-                        public Triple next() {
-                            try {
-                                Triple rawTriple = rawTriplesIter.next();
-                                return new TripleImpl(useSparqlNode(rawTriple.getSubject()),
-                                        rawTriple.getPredicate(),
-                                        useSparqlNode(rawTriple.getObject()));
-                            } catch (IOException ex) {
-                                throw new RuntimeException(ex);
-                            }
-                        }
-                    };
-                }
-
-                /**
-                 * returns all MSGs that are supergraphs of startContext
-                 *
-                 * @param startContext
-                 * @return
-                 */
-                private Set<ImmutableGraph> expandContext(Collection<Triple> startContext) throws IOException {
-
-                    final StringBuilder queryBuilder = new StringBuilder();
-                    queryBuilder.append("SELECT * WHERE {\n ");
-                    Map<BlankNode, String> bNodeVarNameMap = writeTriplePattern(queryBuilder, startContext);
-                    Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet();
-                    for (BlankNode bNode : bNodesInContext) {
-                        final String bNodeVarLabel = bNodeVarNameMap.get(bNode);
-                        //looking for outgoing properties of the bnode
-                        queryBuilder.append("OPTIONAL { ");
-                        queryBuilder.append('?');
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(' ');
-                        queryBuilder.append("?po");
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(" ?o");
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(" } .\n");
-                        //looking for incoming properties of the bnode
-                        queryBuilder.append("OPTIONAL { ");
-                        queryBuilder.append("?s");
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(' ');
-                        queryBuilder.append("?pi");
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(" ?");
-                        queryBuilder.append(bNodeVarLabel);
-                        queryBuilder.append(" } .\n");
-                    }
-                    queryBuilder.append(" }");
-                    final List<Map<String, RdfTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString());
-                    Set<ImmutableGraph> expandedContexts = new HashSet<>();
-                    //the query results may or may be from disjoint supergraphs
-                    //we expand them all as if they are different which may lead
-                    //us to the same MSG multiple times
-                    RESULTS:
-                    for (Map<String, RdfTerm> expansionQueryResult : expansionQueryResults) {
-                        Collection<Triple> expandedContext = new HashSet<>();
-                        Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>();
-                        for (BlankNode oldBNode : bNodesInContext) {
-                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
-                            final RdfTerm newNode = expansionQueryResult.get(bNodeVarLabel);
-                            if (!(newNode instanceof BlankNode)) {
-                                //this subgraph is't a match
-                                continue RESULTS;
-                            }
-                            newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode);
-                        }
-                        expandedContext.addAll(startContext);
-                        boolean newBNodeIntroduced = false;
-                        boolean newTripleAdded = false;
-                        for (BlankNode oldBNode : bNodesInContext) {
-                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
-                            {
-                                final Iri newPredicate = (Iri) expansionQueryResult.get("po" + bNodeVarLabel);
-                                if (newPredicate != null) {
-                                    RdfTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel);
-                                    if (newObject instanceof BlankNode) {
-                                        if (newBNodesToOldBNodes.containsKey(newObject)) {
-                                            //point back to BNode in startContext
-                                            newObject = newBNodesToOldBNodes.get(newObject);
-                                        } else {
-                                            newBNodeIntroduced = true;
-                                        }
-                                    }
-                                    if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) {
-                                        newTripleAdded = true;
-                                    }
-                                }
-                            }
-                            {
-                                final Iri newPredicate = (Iri) expansionQueryResult.get("pi" + bNodeVarLabel);
-                                if (newPredicate != null) {
-                                    RdfTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel);
-                                    if (newSubject instanceof BlankNode) {
-                                        if (newBNodesToOldBNodes.containsKey(newSubject)) {
-                                            //point back to BNode in startContext
-                                            newSubject = newBNodesToOldBNodes.get(newSubject);
-                                        } else {
-                                            newBNodeIntroduced = true;
-                                        }
-                                    }
-                                    if (expandedContext.add(new TripleImpl((BlankNodeOrIri) newSubject, newPredicate, oldBNode))) {
-                                        newTripleAdded = true;
-                                    }
-                                }
-                            }
-                        }
-                        if (newBNodeIntroduced) {
-                            //we could be more efficient than this ans just expand the newly introduced bnodes
-                            expandedContexts.addAll(expandContext(expandedContext));
-                        } else {
-                            if (newTripleAdded) {
-                                //look for more results
-                                expandedContexts.addAll(expandContext(expandedContext));
-                                //expandedContexts.add(expandedContext);
-                            }
-                        }
-
-                    }
-                    if (expandedContexts.isEmpty()) {
-                        expandedContexts.add(new SimpleGraph(startContext).getImmutableGraph());
-                    }
-                    return expandedContexts;
-                }
-
-            }).call();
-        } catch (AlienBNodeException e) {
-            return new Iterator<Triple>() {
-
-                @Override
-                public boolean hasNext() {
-                    return false;
-                }
-
-                @Override
-                public Triple next() {
-                    throw new NoSuchElementException();
-                }
-            };
-        } catch (IOException ex) {
-            throw new RuntimeException(ex);
-        } catch (Exception ex) {
-            throw new RuntimeException(ex);
-        }
-    }
-
-    private String createQuery(final BlankNodeOrIri filterSubject, final Iri filterPredicate, final RdfTerm filterObject) {
-        final StringBuilder selectBuilder = new StringBuilder();
-        selectBuilder.append("SELECT ");
-        final StringBuilder whereBuilder = new StringBuilder();
-        whereBuilder.append("WHERE { ");
-        if (filterSubject == null) {
-            whereBuilder.append("?s");
-            selectBuilder.append("?s ");
-        } else {
-            if (filterSubject instanceof SparqlBNode) {
-                whereBuilder.append("?sn");
-            } else {
-                whereBuilder.append(asSparqlTerm(filterSubject));
-            }
-        }
-        whereBuilder.append(' ');
-        if (filterPredicate == null) {
-            whereBuilder.append("?p");
-            selectBuilder.append("?p ");
-        } else {
-            whereBuilder.append(asSparqlTerm(filterPredicate));
-        }
-        whereBuilder.append(' ');
-        if (filterObject == null) {
-            whereBuilder.append("?o");
-            selectBuilder.append("?o ");
-        } else {
-            if (filterObject instanceof SparqlBNode) {
-                whereBuilder.append("?on");
-            } else {
-                whereBuilder.append(asSparqlTerm(filterObject));
-            }
-        }
-        whereBuilder.append(" .\n");
-        if (filterSubject instanceof SparqlBNode) {
-            //expand bnode context
-            writeTriplePattern(whereBuilder, ((SparqlBNode) filterSubject).context, "sn");
-        }
-        
-        if (filterObject instanceof SparqlBNode) {
-            //expand bnode context
-            writeTriplePattern(whereBuilder, ((SparqlBNode) filterObject).context, "on");
-        }
-
-        whereBuilder.append(" }");
-        return selectBuilder.append(whereBuilder).toString();
-    }
-
-    @Override
-    protected int performSize() {
-        try {
-            return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size();
-        } catch (IOException ex) {
-            throw new RuntimeException(ex);
-        }
-    }
-
-    private String asSparqlTerm(Iri iri) {
-        return "<" + iri.getUnicodeString() + ">";
-    }
-
-    private String asSparqlTerm(Literal literal) {
-        //TODO langauge and datatype
-        return "\"" + literal.getLexicalForm() + "\"";
-    }
-
-    private String asSparqlTerm(BlankNode bnode) {
-        if (!(bnode instanceof SparqlBNode)) {
-            throw new AlienBNodeException();
-        }
-        //this requires adding additional clauses to the graph pattern
-        throw new RuntimeException("SparqlBNodes should have been handled earlier");
-    }
-
-    private String asSparqlTerm(BlankNodeOrIri term) {
-        if (term instanceof Iri) {
-            return asSparqlTerm((Iri) term);
-        } else {
-            return asSparqlTerm((BlankNode) term);
-        }
-    }
-
-    private String asSparqlTerm(RdfTerm term) {
-        if (term instanceof BlankNodeOrIri) {
-            return asSparqlTerm((BlankNodeOrIri) term);
-        } else {
-            return asSparqlTerm((Literal) term);
-        }
-    }
-
-
    /**
     * Convenience overload: writes the triple patterns without a variable
     * label for {@code SparqlBNode.internalBNodeId} occurrences.
     *
     * @param queryBuilder the builder the patterns are appended to
     * @param triples the triples to render as graph patterns
     * @return mapping from each BlankNode encountered to its variable name
     */
    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples) {
        return writeTriplePattern(queryBuilder, triples, null);
    }
-        
-    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples, String varLabelForInternalBNodeId) {
-        final Collection<String> triplePatterns = new ArrayList<>();
-        int varCounter = 0;
-        final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>();
-        for (Triple t : triples) {
-            final StringBuilder builder = new StringBuilder();
-            {
-                final BlankNodeOrIri s = t.getSubject();
-                String varName;
-                if (s instanceof BlankNode) {
-                    if (bNodeVarNameMap.containsKey(s)) {
-                        varName = bNodeVarNameMap.get(s);
-                    } else {
-                        varName = "v" + (varCounter++);
-                        bNodeVarNameMap.put((BlankNode) s, varName);
-                    }
-                    builder.append('?');
-                    builder.append(varName);
-                } else {
-                    if (s.equals(SparqlBNode.internalBNodeId)) {
-                        builder.append('?');
-                        builder.append(varLabelForInternalBNodeId);
-                    } else {
-                        builder.append(asSparqlTerm(s));
-                    }
-                    
-                }
-            }
-            builder.append(' ');
-            builder.append(asSparqlTerm(t.getPredicate()));
-            builder.append(' ');
-            {
-                final RdfTerm o = t.getObject();
-                String varName;
-                if (o instanceof BlankNode) {
-                    if (bNodeVarNameMap.containsKey(o)) {
-                        varName = bNodeVarNameMap.get(o);
-                    } else {
-                        varName = "v" + (varCounter++);
-                        bNodeVarNameMap.put((BlankNode) o, varName);
-                    }
-                    builder.append('?');
-                    builder.append(varName);
-                } else {
-                    if (o.equals(SparqlBNode.internalBNodeId)) {
-                        builder.append('?');
-                        builder.append(varLabelForInternalBNodeId);
-                    } else {
-                        builder.append(asSparqlTerm(o));
-                    }
-                }
-            }
-            builder.append('.');
-            triplePatterns.add(builder.toString());
-
-        }
-        for (String triplePattern : triplePatterns) {
-
-            queryBuilder.append(triplePattern);
-            queryBuilder.append('\n');
-        }
-        return bNodeVarNameMap;
-
-    }
-
    /**
     * Signals that a BlankNode which did not originate from this graph
     * (i.e. is not a SparqlBNode) was used in a filter; such a node can
     * by construction not occur in any query result.
     */
    private static class AlienBNodeException extends RuntimeException {

        public AlienBNodeException() {
        }
    }
-
-}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
new file mode 100644
index 0000000..9329c9b
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeCircleTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeCircleTest.class.getResourceAsStream("bnode-circle.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 2, graph.size());
+    }
+
+    
+    
+    @Test
+    public void nullFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        final Iterator<Triple> iter = graph.filter(null, null, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIri subject = triple1.getSubject();
+        final RdfTerm object = triple1.getObject();
+        Assert.assertTrue(subject instanceof BlankNode);
+        Assert.assertTrue(object instanceof BlankNode);
+        Assert.assertNotEquals(subject, object);
+        Assert.assertTrue(iter.hasNext());
+    }
+    
+    @Test
+    public void foafKnowsFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafKnows = new Iri("http://xmlns.com/foaf/0.1/knows");
+
+        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIri subject = triple1.getSubject();
+        final RdfTerm object = triple1.getObject();
+        Assert.assertTrue(subject instanceof BlankNode);
+        Assert.assertTrue(object instanceof BlankNode);
+        Assert.assertNotEquals(subject, object);
+        Assert.assertTrue(iter.hasNext());
+    }
+    
+
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
new file mode 100644
index 0000000..f0a4aff
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.RdfTerm;
+import org.apache.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class BNodeTest {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = BNodeTest.class.getResourceAsStream("simple-bnode.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 3, graph.size());
+    }
+
+    /* Filtering with a Bode that cannot be in graph
+    */
+    @Test
+    public void filterAlienBNode() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final BlankNode blankNode = new BlankNode();
+        final Iterator<Triple> iter = graph.filter(blankNode, null, null);
+        Assert.assertFalse(iter.hasNext());
+    }
+    
+    @Test
+    public void bNodeIdentity() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafPerson = new Iri("http://xmlns.com/foaf/0.1/Person");
+        final Iri foafName = new Iri("http://xmlns.com/foaf/0.1/name");
+        final Iri foafKnows = new Iri("http://xmlns.com/foaf/0.1/knows");
+        final Iri rdfType = new Iri("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIri namedThing = iter.next().getSubject();
+        Assert.assertTrue(namedThing instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(null, rdfType, foafPerson);
+        Assert.assertTrue(iter2.hasNext());
+        final BlankNodeOrIri person = iter2.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        Assert.assertEquals(namedThing, person);
+        
+        final Iterator<Triple> iter3 = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter3.hasNext());
+        final RdfTerm knownThing = iter3.next().getObject();
+        Assert.assertTrue(knownThing instanceof BlankNode);
+        Assert.assertEquals(knownThing, person);
+        Assert.assertEquals(namedThing, knownThing);
+    }
+    
+    @Test
+    public void filter1() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafPerson = new Iri("http://xmlns.com/foaf/0.1/Person");
+        final Iri foafName = new Iri("http://xmlns.com/foaf/0.1/name");
+        final Iri rdfType = new Iri("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
+
+        final Iterator<Triple> iter = graph.filter(null, foafName, null);
+        Assert.assertTrue(iter.hasNext());
+        final BlankNodeOrIri person = iter.next().getSubject();
+        Assert.assertTrue(person instanceof BlankNode);
+        
+        final Iterator<Triple> iter2 = graph.filter(person, rdfType, null);
+        Assert.assertTrue(iter2.hasNext());
+    }
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/clerezza-rdf-core/blob/816dc11f/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
----------------------------------------------------------------------
diff --git a/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
new file mode 100644
index 0000000..6300281
--- /dev/null
+++ b/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.rdf.impl.sparql;
+
+import com.hp.hpl.jena.query.DatasetAccessor;
+import com.hp.hpl.jena.query.DatasetAccessorFactory;
+import java.io.IOException;
+import java.net.ServerSocket;
+import org.apache.jena.fuseki.EmbeddedFusekiServer;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import java.io.InputStream;
+import java.util.Iterator;
+import org.apache.commons.rdf.BlankNode;
+import org.apache.commons.rdf.BlankNodeOrIri;
+import org.apache.commons.rdf.Graph;
+import org.apache.commons.rdf.Iri;
+import org.apache.commons.rdf.Triple;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author reto
+ */
+public class SimilarBNodes {
+
+    final static int serverPort = findFreePort();
+    static EmbeddedFusekiServer server;
+
+    @BeforeClass
+    public static void prepare() throws IOException {
+        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
+        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
+        final InputStream in = SimilarBNodes.class.getResourceAsStream("similar-bnodes.ttl");
+        final Model m = ModelFactory.createDefaultModel();
+        String base = "http://example.org/";
+        m.read(in, base, "TURTLE");
+        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
+        server.start();
+        System.out.println("Started fuseki on port " + serverPort);
+        accessor.putModel(m);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        server.stop();
+    }
+
+    @Test
+    public void graphSize() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        Assert.assertEquals("Graph not of the exepected size", 2, graph.size());
+    }
+
+    
+    
+    @Test
+    public void foafKnowsFilter() {
+        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
+        
+        final Iri foafKnows = new Iri("http://xmlns.com/foaf/0.1/knows");
+
+        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple1 = iter.next();
+        final BlankNodeOrIri subject1 = triple1.getSubject();
+        Assert.assertTrue(subject1 instanceof BlankNode);
+        Assert.assertTrue(iter.hasNext());
+        final Triple triple2 = iter.next();
+        final BlankNodeOrIri subject2 = triple2.getSubject();
+        Assert.assertTrue(subject2 instanceof BlankNode);
+        Assert.assertNotEquals(subject1, subject2);
+    }
+    
+
+    
+
+    public static int findFreePort() {
+        int port = 0;
+        try (ServerSocket server = new ServerSocket(0);) {
+            port = server.getLocalPort();
+        } catch (Exception e) {
+            throw new RuntimeException("unable to find a free port");
+        }
+        return port;
+    }
+
+}


Mime
View raw message