marmotta-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sschaff...@apache.org
Subject [4/8] git commit: towards SPARQL native
Date Tue, 16 Sep 2014 09:55:02 GMT
towards SPARQL native


Project: http://git-wip-us.apache.org/repos/asf/marmotta/repo
Commit: http://git-wip-us.apache.org/repos/asf/marmotta/commit/22918591
Tree: http://git-wip-us.apache.org/repos/asf/marmotta/tree/22918591
Diff: http://git-wip-us.apache.org/repos/asf/marmotta/diff/22918591

Branch: refs/heads/develop
Commit: 22918591604aae8b8c258cce1bef7ff1881c846a
Parents: 3779418
Author: Sebastian Schaffert <sschaffert@apache.org>
Authored: Mon Sep 15 18:08:12 2014 +0200
Committer: Sebastian Schaffert <sschaffert@apache.org>
Committed: Mon Sep 15 18:08:12 2014 +0200

----------------------------------------------------------------------
 .../kiwi/sparql/builder/PatternCollector.java   |  31 +-
 .../kiwi/sparql/builder/SQLBuilder.java         | 431 ++++++++++---------
 .../kiwi/sparql/builder/SQLFragment.java        |  60 +++
 .../kiwi/sparql/builder/SQLPattern.java         |  98 +++++
 .../kiwi/sparql/builder/SQLVariable.java        |  78 ++++
 .../evaluation/KiWiEvaluationStrategyImpl.java  |  35 +-
 .../persistence/KiWiSparqlConnection.java       |  28 +-
 .../kiwi/test/junit/KiWiDatabaseRunner.java     |  20 +-
 8 files changed, 560 insertions(+), 221 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/PatternCollector.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/PatternCollector.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/PatternCollector.java
index 9005968..3dba067 100644
--- a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/PatternCollector.java
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/PatternCollector.java
@@ -17,12 +17,13 @@
 
 package org.apache.marmotta.kiwi.sparql.builder;
 
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.LeftJoin;
 import org.openrdf.query.algebra.StatementPattern;
 import org.openrdf.query.algebra.TupleExpr;
 import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.LinkedList;
 
 /**
 * Collect all statement patterns in a tuple expression.
@@ -31,15 +32,37 @@ import java.util.List;
 */
 public class PatternCollector extends QueryModelVisitorBase<RuntimeException> {
 
-    List<StatementPattern> patterns = new ArrayList<>();
+    LinkedList<SQLFragment> parts   = new LinkedList<>();
+
+    int counter = 0;
 
     public PatternCollector(TupleExpr expr) {
+        parts.push(new SQLFragment());
         expr.visit(this);
     }
 
     @Override
     public void meet(StatementPattern node) throws RuntimeException {
-        patterns.add(node);
+        parts.getLast().getPatterns().add(new SQLPattern("P" + (++counter), node));
+
+        super.meet(node);
+    }
+
+    @Override
+    public void meet(LeftJoin node) throws RuntimeException {
+        node.getLeftArg().visit(this);
+        parts.addLast(new SQLFragment());
+        if(node.hasCondition()) {
+            parts.getLast().getFilters().add(node.getCondition());
+        }
+        node.getRightArg().visit(this);
+
+    }
+
+
+    @Override
+    public void meet(Filter node) throws RuntimeException {
+        parts.getLast().getFilters().add(node.getCondition());
 
         super.meet(node);
     }

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLBuilder.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLBuilder.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLBuilder.java
index 0a50fbc..418df64 100644
--- a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLBuilder.java
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLBuilder.java
@@ -125,12 +125,6 @@ public class SQLBuilder {
 
 
     /**
-     * Maps triple patterns from SPARQL WHERE to SQL aliases for the TRIPLES table in the
FROM part. Used
-     * to join one instance of the triples table for each triple pattern occurring in the
query.
-     */
-    private Map<StatementPattern,String> patternNames = new HashMap<>();
-
-    /**
      * A map for mapping the SPARQL variable names to internal names used for constructing
SQL aliases.
      * Will look like { ?x -> "V1", ?y -> "V2", ... }
      */
@@ -151,15 +145,11 @@ public class SQLBuilder {
     private Map<Var,List<String>> queryVariableIds = new HashMap<>();
 
 
-    /**
-     * A map for defining alternative context values for each variable used in the context
part of a pattern
-     */
-    private Map<StatementPattern,List<Resource>> variableContexts = new HashMap<>();
 
     /**
      * The triple patterns collected from the query.
      */
-    private List<StatementPattern> patterns;
+    private List<SQLFragment> fragments;
 
 
     private TupleExpr query;
@@ -246,11 +236,13 @@ public class SQLBuilder {
     }
 
     private void prepareBuilder()  throws UnsatisfiableQueryException {
-        Preconditions.checkArgument(query instanceof Join || query instanceof Filter || query
instanceof StatementPattern || query instanceof Distinct || query instanceof Slice || query
instanceof Reduced);
+        Preconditions.checkArgument(query instanceof LeftJoin ||query instanceof Join ||
query instanceof Filter || query instanceof StatementPattern || query instanceof Distinct
|| query instanceof Slice || query instanceof Reduced);
 
 
         // collect all patterns in a list, using depth-first search over the join
-        patterns = new PatternCollector(query).patterns;
+        PatternCollector pc = new PatternCollector(query);
+
+        fragments = pc.parts;
 
         // collect offset and limit from the query if given
         offset   = new LimitFinder(query).offset;
@@ -259,110 +251,192 @@ public class SQLBuilder {
         // check if query is distinct
         distinct = new DistinctFinder(query).distinct;
 
-        // associate a name with each pattern; the names are used in the database query to
refer to the triple
-        // that matched this pattern and in the construction of variable names for the SQL
query
-        int patternCount = 0;
-        for(StatementPattern p : patterns) {
-            patternNames.put(p,"P"+ (++patternCount));
-        }
-
         // find all variables occurring in the patterns and create a map to map them to
         // field names in the database query; each variable will have one or several field
names,
         // one for each pattern it occurs in; field names are constructed automatically by
a counter
         // and the pattern name to ensure the name is a valid HQL identifier
         int variableCount = 0;
-        for(StatementPattern p : patterns) {
-            // build pattern
-            Var[] fields = new Var[] {
-                    p.getSubjectVar(),
-                    p.getPredicateVar(),
-                    p.getObjectVar(),
-                    p.getContextVar()
-            };
-            for(int i = 0; i<fields.length; i++) {
-                if(fields[i] != null && !fields[i].hasValue()) {
-                    Var v = fields[i];
-                    if(variableNames.get(v) == null) {
-                        variableNames.put(v,"V"+ (++variableCount));
-                        queryVariables.put(v,new LinkedList<String>());
-                        queryVariableIds.put(v, new LinkedList<String>());
-                    }
-                    String pName = patternNames.get(p);
-                    String vName = variableNames.get(v);
-                    if(hasNodeCondition(fields[i], query)) {
-                        queryVariables.get(v).add(pName + "_" + positions[i] + "_" + vName);
+        for(SQLFragment f : fragments) {
+            for (SQLPattern p : f.getPatterns()) {
+                // build pattern
+                Var[] fields = p.getFields();
+                for (int i = 0; i < fields.length; i++) {
+                    if (fields[i] != null && !fields[i].hasValue()) {
+                        Var v = fields[i];
+                        if (variableNames.get(v) == null) {
+                            variableNames.put(v, "V" + (++variableCount));
+                            queryVariables.put(v, new LinkedList<String>());
+                            queryVariableIds.put(v, new LinkedList<String>());
+                        }
+                        String pName = p.getName();
+                        String vName = variableNames.get(v);
+                        if (hasNodeCondition(fields[i], query)) {
+                            queryVariables.get(v).add(pName + "_" + positions[i] + "_" +
vName);
+                        }
+
+                        // if the variable has been used before, add a join condition to
the first occurrence
+                        if(queryVariableIds.get(v).size() > 0) {
+                            p.getConditions().add(queryVariableIds.get(v).get(0) + " = "
+ pName + "." + positions[i]);
+                        }
+
+                        queryVariableIds.get(v).add(pName + "." + positions[i]);
                     }
-                    queryVariableIds.get(v).add(pName + "." + positions[i]);
                 }
             }
         }
 
         // find context restrictions of patterns and match them with potential restrictions
given in the
         // dataset (MARMOTTA-340)
-        for(StatementPattern p : patterns) {
-            Resource[] contexts;
-            Value contextValue = p.getContextVar() != null ? p.getContextVar().getValue()
: null;
-
-            Set<URI> graphs = null;
-            boolean emptyGraph = false;
-
-            if (dataset != null) {
-                if (p.getScope() == StatementPattern.Scope.DEFAULT_CONTEXTS) {
-                    graphs = dataset.getDefaultGraphs();
-                    emptyGraph = graphs.isEmpty() && !dataset.getNamedGraphs().isEmpty();
-                } else {
-                    graphs = dataset.getNamedGraphs();
-                    emptyGraph = graphs.isEmpty() && !dataset.getDefaultGraphs().isEmpty();
+        for(SQLFragment f : fragments) {
+            for (SQLPattern p : f.getPatterns()) {
+                Resource[] contexts;
+                Value contextValue = p.getSparqlPattern().getContextVar() != null ? p.getSparqlPattern().getContextVar().getValue()
: null;
+
+                Set<URI> graphs = null;
+                boolean emptyGraph = false;
+
+                if (dataset != null) {
+                    if (p.getSparqlPattern().getScope() == StatementPattern.Scope.DEFAULT_CONTEXTS)
{
+                        graphs = dataset.getDefaultGraphs();
+                        emptyGraph = graphs.isEmpty() && !dataset.getNamedGraphs().isEmpty();
+                    } else {
+                        graphs = dataset.getNamedGraphs();
+                        emptyGraph = graphs.isEmpty() && !dataset.getDefaultGraphs().isEmpty();
+                    }
                 }
-            }
 
-            // set the contexts to query according to the following rules:
-            // 1. if the context defined in the dataset does not exist, there will be no
result, so set "empty" to true
-            // 2. if no context graphs have been given, use the context from the statement
-            // 3. if context graphs have been given and the statement has a context, check
if the statement context is
-            //    contained in the context graphs; if no, set "empty" to true as there can
be no result
-            // 4. if context graphs have been given and the statement has no context, use
the contexts from the
-            //    dataset
-
-            if (emptyGraph) {
-                // Search zero contexts
-                throw new UnsatisfiableQueryException("dataset does not contain any default
graphs");
-            } else if (graphs == null || graphs.isEmpty()) {
-                if (contextValue != null) {
-                    contexts = new Resource[]{(Resource) contextValue};
+                // set the contexts to query according to the following rules:
+                // 1. if the context defined in the dataset does not exist, there will be
no result, so set "empty" to true
+                // 2. if no context graphs have been given, use the context from the statement
+                // 3. if context graphs have been given and the statement has a context,
check if the statement context is
+                //    contained in the context graphs; if no, set "empty" to true as there
can be no result
+                // 4. if context graphs have been given and the statement has no context,
use the contexts from the
+                //    dataset
+
+                if (emptyGraph) {
+                    // Search zero contexts
+                    throw new UnsatisfiableQueryException("dataset does not contain any default
graphs");
+                } else if (graphs == null || graphs.isEmpty()) {
+                    if (contextValue != null) {
+                        contexts = new Resource[]{(Resource) contextValue};
+                    } else {
+                        contexts = new Resource[0];
+                    }
+                } else if (contextValue != null) {
+                    if (graphs.contains(contextValue)) {
+                        contexts = new Resource[]{(Resource) contextValue};
+                    } else {
+                        // Statement pattern specifies a context that is not part of
+                        // the dataset
+                        throw new UnsatisfiableQueryException("default graph does not contain
statement context '" + contextValue.stringValue() + "'");
+                    }
                 } else {
-                    contexts = new Resource[0];
+                    contexts = new Resource[graphs.size()];
+                    int i = 0;
+                    for (URI graph : graphs) {
+                        URI context = null;
+                        if (!SESAME.NIL.equals(graph)) {
+                            context = graph;
+                        }
+                        contexts[i++] = context;
+                    }
                 }
-            } else if (contextValue != null) {
-                if (graphs.contains(contextValue)) {
-                    contexts = new Resource[]{(Resource) contextValue};
-                } else {
-                    // Statement pattern specifies a context that is not part of
-                    // the dataset
-                    throw new UnsatisfiableQueryException("default graph does not contain
statement context '" + contextValue.stringValue() + "'");
+
+
+                // build an OR query for the value of the context variable
+                if (contexts.length > 0) {
+                    p.setVariableContexts(Arrays.asList(contexts));
                 }
-            } else {
-                contexts = new Resource[graphs.size()];
-                int i = 0;
-                for (URI graph : graphs) {
-                    URI context = null;
-                    if (!SESAME.NIL.equals(graph)) {
-                        context = graph;
+            }
+        }
+
+        prepareConditions();
+    }
+
+
+    private void prepareConditions() throws UnsatisfiableQueryException {
+        // build the where clause as follows:
+        // 1. iterate over all patterns and for each resource and literal field in subject,
+        //    property, object, or context, and set a query condition according to the
+        //    nodes given in the pattern
+        // 2. for each variable that has more than one occurrences, add a join condition
+        // 3. for each variable in the initialBindings, add a condition to the where clause
+
+
+        // iterate over all fragments and add translate the filter conditions into SQL
+        for(SQLFragment f : fragments) {
+            for(ValueExpr e : f.getFilters()) {
+                f.getConditions().add(evaluateExpression(e, OPTypes.ANY));
+            }
+        }
+
+
+        // 1. iterate over all patterns and for each resource and literal field in subject,
+        //    property, object, or context, and set a query condition according to the
+        //    nodes given in the pattern
+        for(SQLFragment f : fragments) {
+            for (SQLPattern p : f.getPatterns()) {
+                String pName = p.getName();
+                Var[] fields = p.getFields();
+                for (int i = 0; i < fields.length; i++) {
+                    // find node id of the resource or literal field and use it in the where
clause
+                    // in this way we can avoid setting too many query parameters
+                    long nodeId = -1;
+                    if (fields[i] != null && fields[i].hasValue()) {
+                        Value v = converter.convert(fields[i].getValue());
+                        if (v instanceof KiWiNode) {
+                            nodeId = ((KiWiNode) v).getId();
+                        } else {
+                            throw new UnsatisfiableQueryException("the values in this query
have not been created by the KiWi value factory");
+                        }
+
+                        if (nodeId >= 0) {
+                            String condition = pName + "." + positions[i] + " = " + nodeId;
+                            p.getConditions().add(condition);
+                        }
                     }
-                    contexts[i++] = context;
                 }
             }
+        }
 
 
-            // build an OR query for the value of the context variable
-            if(contexts.length > 0) {
-                variableContexts.put(p, Arrays.asList(contexts));
+        // 6. for each context variable with a restricted list of contexts, we add a condition
to the where clause
+        //    of the form (V.id = R1.id OR V.id = R2.id ...)
+        for(SQLFragment f : fragments) {
+            for (SQLPattern p : f.getPatterns()) {
+                // the variable
+                String varName = p.getName();
+
+                if (p.getVariableContexts() != null) {
+                    // the string we are building
+                    StringBuilder cCond = new StringBuilder();
+                    cCond.append("(");
+                    for (Iterator<Resource> it = p.getVariableContexts().iterator();
it.hasNext(); ) {
+                        Value v = converter.convert(it.next());
+                        if (v instanceof KiWiNode) {
+                            long nodeId = ((KiWiNode) v).getId();
+
+                            cCond.append(varName);
+                            cCond.append(".context = ");
+                            cCond.append(nodeId);
+
+                            if (it.hasNext()) {
+                                cCond.append(" OR ");
+                            }
+                        } else {
+                            throw new UnsatisfiableQueryException("the values in this query
have not been created by the KiWi value factory");
+                        }
+
+                    }
+                    cCond.append(")");
+                    p.getConditions().add(cCond.toString());
+                }
             }
         }
 
-
     }
 
+
     private String buildSelectClause() {
         StringBuilder selectClause = new StringBuilder();
 
@@ -396,29 +470,80 @@ public class SQLBuilder {
         //    - object, there will be a "inner join P.object as P_O_V" or "left outer join
p.object as P_O_V"
         //    - context, there will be a "inner join P.context as P_C_V" or "left outer join
p.context as P_C_V"
         StringBuilder fromClause = new StringBuilder();
-        for(Iterator<StatementPattern> it = patterns.iterator(); it.hasNext(); ) {
-            StatementPattern p = it.next();
-            String pName = patternNames.get(p);
-            fromClause.append("triples "+pName);
-
-            Var[] fields = new Var[] {
-                    p.getSubjectVar(),
-                    p.getPredicateVar(),
-                    p.getObjectVar(),
-                    p.getContextVar()
-            };
-            for(int i = 0; i<fields.length; i++) {
-                if(fields[i] != null && !fields[i].hasValue() && hasNodeCondition(fields[i],
query)) {
-                    String vName = variableNames.get(fields[i]);
-                    fromClause.append(" INNER JOIN nodes AS ");
-                    fromClause.append(pName + "_"+positions[i]+"_" + vName);
-                    fromClause.append(" ON " + pName + "." + positions[i] + " = ");
-                    fromClause.append(pName + "_"+positions[i]+"_" + vName + ".id ");
+        for(Iterator<SQLFragment> fit = fragments.iterator(); fit.hasNext(); ) {
+            SQLFragment frag = fit.next();
+
+            for (Iterator<SQLPattern> it = frag.getPatterns().iterator(); it.hasNext();
) {
+                boolean firstFragment = fromClause.length() == 0;
+
+                SQLPattern p = it.next();
+                String pName = p.getName();
+                fromClause.append("triples " + pName);
+
+                StringBuilder onClause = new StringBuilder();
+
+                if(!firstFragment) {
+                    for(Iterator<String> cit = p.getConditions().iterator(); cit.hasNext();
) {
+                        if(onClause.length() > 0) {
+                            onClause.append("\n      AND ");
+                        }
+                        onClause.append(cit.next());
+                    }
+                }
+
+
+
+                Var[] fields = p.getFields();
+                for (int i = 0; i < fields.length; i++) {
+
+                    if (fields[i] != null && !fields[i].hasValue() && hasNodeCondition(fields[i],
query)) {
+                        // finish previous ON clause and start a new one
+                        if(onClause.length() > 0) {
+                            fromClause.append(" ON (");
+                            fromClause.append(onClause);
+                            fromClause.append(")");
+                            onClause = new StringBuilder();
+                        }
+
+                        String vName = variableNames.get(fields[i]);
+                        fromClause.append("\n    INNER JOIN nodes AS ");
+                        fromClause.append(pName + "_" + positions[i] + "_" + vName);
+
+                        if(onClause.length() > 0) {
+                            onClause.append("\n      AND ");
+                        }
+                        onClause.append(pName + "." + positions[i] + " = " + pName + "_"
+ positions[i] + "_" + vName + ".id ");
+
+                        //fromClause.append(" ON " + pName + "." + positions[i] + " = ");
+                        //fromClause.append(pName + "_" + positions[i] + "_" + vName + ".id
");
+                    }
+                }
+
+                if(!it.hasNext()) {
+                    // if this is the last pattern of the fragment, add the filter conditions
+                    for(Iterator<String> cit = frag.getConditions().iterator(); cit.hasNext();
) {
+                        if(onClause.length() > 0) {
+                            onClause.append("\n       AND ");
+                        }
+                        onClause.append(cit.next());
+                    }
+                }
+
+
+                if(onClause.length() > 0) {
+                    fromClause.append(" ON (");
+                    fromClause.append(onClause);
+                    fromClause.append(")");
+                }
+
+
+                if (it.hasNext()) {
+                    fromClause.append("\n JOIN \n  ");
                 }
             }
 
-            if(it.hasNext()) {
-                fromClause.append(",\n ");
+            if(fit.hasNext()) {
+                fromClause.append("\n LEFT JOIN \n  ");
             }
         }
 
@@ -437,47 +562,11 @@ public class SQLBuilder {
         // list of where conditions that will later be connected by AND
         List<String> whereConditions = new LinkedList<String>();
 
-
-        // 1. iterate over all patterns and for each resource and literal field in subject,
-        //    property, object, or context, and set a query condition according to the
-        //    nodes given in the pattern
-        for(StatementPattern p : patterns) {
-            String pName = patternNames.get(p);
-            Var[] fields = new Var[] {
-                    p.getSubjectVar(),
-                    p.getPredicateVar(),
-                    p.getObjectVar(),
-                    p.getContextVar()
-            };
-            for(int i = 0; i<fields.length; i++) {
-                // find node id of the resource or literal field and use it in the where
clause
-                // in this way we can avoid setting too many query parameters
-                long nodeId = -1;
-                if(fields[i] != null && fields[i].hasValue()) {
-                    Value v = converter.convert(fields[i].getValue());
-                    if(v instanceof KiWiNode) {
-                        nodeId = ((KiWiNode) v).getId();
-                    } else {
-                        throw new UnsatisfiableQueryException("the values in this query have
not been created by the KiWi value factory");
-                    }
-
-                    if(nodeId >= 0) {
-                        String condition = pName+"."+positions[i]+" = " + nodeId;
-                        whereConditions.add(condition);
-                    }
-                }
-            }
+        // 1. for the first pattern of the first fragment, we add the conditions to the WHERE
clause
+        if(fragments.size() > 0 && fragments.get(0).getPatterns().size() >
0) {
+            whereConditions.addAll(fragments.get(0).getPatterns().get(0).getConditions());
         }
 
-        // 2. for each variable that has more than one occurrences, add a join condition
-        for(Var v : queryVariableIds.keySet()) {
-            List<String> vNames = queryVariableIds.get(v);
-            for(int i = 1; i < vNames.size(); i++) {
-                String vName1 = vNames.get(i-1);
-                String vName2 = vNames.get(i);
-                whereConditions.add(vName1 + " = " + vName2);
-            }
-        }
 
         // 3. for each variable in the initialBindings, add a condition to the where clause
setting it
         //    to the node given as binding
@@ -499,50 +588,6 @@ public class SQLBuilder {
             }
         }
 
-        // 4. for each pattern, ensure that the matched triple is not marked as deleted
-        for(StatementPattern p : patterns) {
-            String pName = patternNames.get(p);
-            whereConditions.add(pName+".deleted = false");
-        }
-
-
-        // 5. for each filter condition, add a statement to the where clause
-        List<ValueExpr> filters = new FilterCollector(query).filters;
-        for(ValueExpr expr : filters) {
-            whereConditions.add(evaluateExpression(expr, null));
-        }
-
-
-        // 6. for each context variable with a restricted list of contexts, we add a condition
to the where clause
-        //    of the form (V.id = R1.id OR V.id = R2.id ...)
-        for(Map.Entry<StatementPattern,List<Resource>> vctx : variableContexts.entrySet())
{
-            // the variable
-            String varName = patternNames.get(vctx.getKey());
-
-            // the string we are building
-            StringBuilder cCond = new StringBuilder();
-            cCond.append("(");
-            for(Iterator<Resource> it = vctx.getValue().iterator(); it.hasNext(); )
{
-                Value v = converter.convert(it.next());
-                if(v instanceof KiWiNode) {
-                    long nodeId = ((KiWiNode) v).getId();
-
-                    cCond.append(varName);
-                    cCond.append(".context = ");
-                    cCond.append(nodeId);
-
-                    if(it.hasNext()) {
-                        cCond.append(" OR ");
-                    }
-                } else {
-                    throw new UnsatisfiableQueryException("the values in this query have
not been created by the KiWi value factory");
-                }
-
-            }
-            cCond.append(")");
-            whereConditions.add(cCond.toString());
-        }
-
         // construct the where clause
         StringBuilder whereClause = new StringBuilder();
         for(Iterator<String> it = whereConditions.iterator(); it.hasNext(); ) {

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLFragment.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLFragment.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLFragment.java
new file mode 100644
index 0000000..b9f40b4
--- /dev/null
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLFragment.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.marmotta.kiwi.sparql.builder;
+
+import org.openrdf.query.algebra.ValueExpr;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * An SQL fragment is a part of the SQL query where all patterns are joined with INNER JOINS
and not LEFT JOINS. Several
+ * fragments are then joined with each other using a LEFT JOIN.
+ *
+ * @author Sebastian Schaffert (sschaffert@apache.org)
+ */
+public class SQLFragment {
+
+    /**
+     * The patterns contained in this fragment. All patterns are joined using an INNER JOIN.
+     */
+    private List<SQLPattern> patterns;
+
+    private List<String> conditions;
+
+    private List<ValueExpr> filters;
+
+
+    public SQLFragment() {
+        this.patterns   = new ArrayList<>();
+        this.conditions = new ArrayList<>();
+        this.filters    = new ArrayList<>();
+    }
+
+    public List<SQLPattern> getPatterns() {
+        return patterns;
+    }
+
+    public List<String> getConditions() {
+        return conditions;
+    }
+
+    public List<ValueExpr> getFilters() {
+        return filters;
+    }
+}

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLPattern.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLPattern.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLPattern.java
new file mode 100644
index 0000000..61525eb
--- /dev/null
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLPattern.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.marmotta.kiwi.sparql.builder;
+
+import org.openrdf.model.Resource;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A statement pattern translated to SQL consists of a named reference to the triple table,
an indicator giving the
+ * join type (JOIN or LEFT JOIN), and any number of filter conditions
+ *
+ * @author Sebastian Schaffert (sschaffert@apache.org)
+ */
+public class SQLPattern {
+
+
+    /**
+     * SQL conditions defined on this pattern; may only refer to previous or the current
statement.
+     */
+    private List<String> conditions;
+
+    /**
+     * Maps triple patterns from SPARQL WHERE to SQL aliases for the TRIPLES table in the
FROM part. Used
+     * to join one instance of the triples table for each triple pattern occurring in the
query.
+     */
+    private String name;
+
+    /**
+     * A reference to the SPARQL statement pattern represented by this SQLPattern
+     */
+    private StatementPattern sparqlPattern;
+
+    /**
+     * Alternative context values for each variable used in the context part of a pattern
+     */
+    private List<Resource> variableContexts;
+
+    public SQLPattern(String name, StatementPattern sparqlPattern) {
+        this.name = name;
+        this.conditions = new ArrayList<>();
+        this.conditions.add(name + ".deleted = false");
+        this.sparqlPattern = sparqlPattern;
+    }
+
+
+    public Var[] getFields() {
+        return new Var[] {
+                getSparqlPattern().getSubjectVar(),
+                getSparqlPattern().getPredicateVar(),
+                getSparqlPattern().getObjectVar(),
+                getSparqlPattern().getContextVar()
+        };
+    }
+
+
+    public List<String> getConditions() {
+        return conditions;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public StatementPattern getSparqlPattern() {
+        return sparqlPattern;
+    }
+
+    public List<Resource> getVariableContexts() {
+        return variableContexts;
+    }
+
+    public void setVariableContexts(List<Resource> variableContexts) {
+        this.variableContexts = variableContexts;
+    }
+}

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLVariable.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLVariable.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLVariable.java
new file mode 100644
index 0000000..436558b
--- /dev/null
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/builder/SQLVariable.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.marmotta.kiwi.sparql.builder;
+
+import org.openrdf.query.algebra.Var;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Representation of a SPARQL variable in SQL.
+ *
+ * @author Sebastian Schaffert (sschaffert@apache.org)
+ */
+public class SQLVariable {
+
+    /**
+     * A map for mapping the SPARQL variable names to internal names used for constructing
SQL aliases.
+     * Will look like { ?x -> "V1", ?y -> "V2", ... }
+     */
+    private String name;
+
+    private Var sparqlVariable;
+
+    /**
+     * A map for mapping SPARQL variables to field names; each variable might have one or
more field names,
+     * depending on the number of patterns it occurs in; will look like
+     * { ?x -> ["P1_V1", "P2_V1"], ?y -> ["P2_V2"], ... }
+     */
+    private List<String> aliases;
+
+
+    /**
+     * A map for mapping SPARQL variables to database node ID selectors. A node ID can occur
either as
+     * primary key in the NODES table or in the subject, predicate, object and context fields
of a pattern.
+     */
+    private List<String> nodeIds;
+
+
+    public SQLVariable(String name, Var sparqlVariable) {
+        this.name = name;
+        this.sparqlVariable = sparqlVariable;
+
+        this.aliases = new ArrayList<>();
+        this.nodeIds = new ArrayList<>();
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public Var getSparqlVariable() {
+        return sparqlVariable;
+    }
+
+    public List<String> getAliases() {
+        return aliases;
+    }
+
+    public List<String> getNodeIds() {
+        return nodeIds;
+    }
+}

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/evaluation/KiWiEvaluationStrategyImpl.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/evaluation/KiWiEvaluationStrategyImpl.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/evaluation/KiWiEvaluationStrategyImpl.java
index c7bbcaf..7d560c0 100644
--- a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/evaluation/KiWiEvaluationStrategyImpl.java
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/evaluation/KiWiEvaluationStrategyImpl.java
@@ -68,6 +68,35 @@ public class KiWiEvaluationStrategyImpl extends EvaluationStrategyImpl{
     }
 
     @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(LeftJoin
join, BindingSet bindings) throws QueryEvaluationException {
+        if(Thread.currentThread().isInterrupted()) {
+            throw new QueryEvaluationException("SPARQL evaluation has already been cancelled");
+        }
+
+        if(isSupported(join)) {
+            log.debug("applying KiWi LEFTJOIN optimizations on SPARQL query ...");
+
+            try {
+                return new ExceptionConvertingIteration<BindingSet, QueryEvaluationException>(connection.evaluateJoin(join,
bindings, dataset)) {
+                    @Override
+                    protected QueryEvaluationException convert(Exception e) {
+                        return new QueryEvaluationException(e);
+                    }
+                };
+            } catch (SQLException e) {
+                throw new QueryEvaluationException(e.getMessage(),e);
+            } catch (IllegalArgumentException e) {
+                throw new QueryEvaluationException(e.getMessage(),e);
+            } catch (InterruptedException e) {
+                throw new QueryInterruptedException(e.getMessage());
+            }
+        } else {
+            return super.evaluate(join, bindings);
+        }
+    }
+
+
+    @Override
     public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Join join,
BindingSet bindings) throws QueryEvaluationException {
         if(Thread.currentThread().isInterrupted()) {
             throw new QueryEvaluationException("SPARQL evaluation has already been cancelled");
@@ -200,6 +229,8 @@ public class KiWiEvaluationStrategyImpl extends EvaluationStrategyImpl{
     private boolean isSupported(TupleExpr expr) {
         if(expr instanceof Join) {
             return isSupported(((Join) expr).getLeftArg()) && isSupported(((Join)
expr).getRightArg());
+        } else if(expr instanceof LeftJoin) {
+                return isSupported(((LeftJoin) expr).getLeftArg()) && isSupported(((LeftJoin)
expr).getRightArg()) && isSupported(((LeftJoin)expr).getCondition());
         } else if(expr instanceof Filter) {
             return isSupported(((Filter) expr).getArg()) && isSupported(((Filter)
expr).getCondition());
         } else if(expr instanceof StatementPattern) {
@@ -223,7 +254,9 @@ public class KiWiEvaluationStrategyImpl extends EvaluationStrategyImpl{
      * @return
      */
     private boolean isSupported(ValueExpr expr) {
-        if(expr instanceof Compare) {
+        if(expr == null) {
+            return true;
+        } else if(expr instanceof Compare) {
             return isSupported(((Compare) expr).getLeftArg()) && isSupported(((Compare)
expr).getRightArg());
         } else if(expr instanceof MathExpr) {
             return isSupported(((MathExpr) expr).getLeftArg()) && isSupported(((MathExpr)
expr).getRightArg());

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/persistence/KiWiSparqlConnection.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/persistence/KiWiSparqlConnection.java
b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/persistence/KiWiSparqlConnection.java
index ea1f476..ac0cc6e 100644
--- a/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/persistence/KiWiSparqlConnection.java
+++ b/libraries/kiwi/kiwi-sparql/src/main/java/org/apache/marmotta/kiwi/sparql/persistence/KiWiSparqlConnection.java
@@ -17,30 +17,23 @@
 
 package org.apache.marmotta.kiwi.sparql.persistence;
 
-import com.google.common.base.Preconditions;
 import info.aduna.iteration.CloseableIteration;
 import info.aduna.iteration.CloseableIteratorIteration;
 import info.aduna.iteration.EmptyIteration;
 import info.aduna.iteration.Iterations;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.marmotta.commons.util.DateUtils;
 import org.apache.marmotta.kiwi.model.rdf.KiWiNode;
 import org.apache.marmotta.kiwi.persistence.KiWiConnection;
 import org.apache.marmotta.kiwi.persistence.KiWiDialect;
 import org.apache.marmotta.kiwi.persistence.util.ResultSetIteration;
 import org.apache.marmotta.kiwi.persistence.util.ResultTransformerFunction;
 import org.apache.marmotta.kiwi.sail.KiWiValueFactory;
-import org.apache.marmotta.kiwi.sparql.builder.*;
+import org.apache.marmotta.kiwi.sparql.builder.SQLBuilder;
 import org.apache.marmotta.kiwi.sparql.exception.UnsatisfiableQueryException;
-import org.openrdf.model.*;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.vocabulary.FN;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.model.vocabulary.XMLSchema;
 import org.openrdf.query.Binding;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.Var;
 import org.openrdf.query.impl.MapBindingSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,11 +41,9 @@ import org.slf4j.LoggerFactory;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.*;
-import java.util.regex.Pattern;
 
 /**
 * Provide improved SPARQL support by evaluating certain common complex SPARQL constructs
directly on the
@@ -132,8 +123,10 @@ public class KiWiSparqlConnection {
                         KiWiNode[] nodes = parent.loadNodesByIds(nodeIds);
 
                         for (int i = 0; i < vars.size(); i++) {
-                            Var v = vars.get(i);
-                            resultRow.addBinding(v.getName(), nodes[i]);
+                            if(nodes[i] != null) {
+                                Var v = vars.get(i);
+                                resultRow.addBinding(v.getName(), nodes[i]);
+                            }
                         }
 
 
@@ -170,4 +163,7 @@ public class KiWiSparqlConnection {
         }
     }
 
+    public KiWiDialect getDialect() {
+        return parent.getDialect();
+    }
 }

http://git-wip-us.apache.org/repos/asf/marmotta/blob/22918591/libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/junit/KiWiDatabaseRunner.java
----------------------------------------------------------------------
diff --git a/libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/junit/KiWiDatabaseRunner.java
b/libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/junit/KiWiDatabaseRunner.java
index 0fd3998..a483a7f 100644
--- a/libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/junit/KiWiDatabaseRunner.java
+++ b/libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/junit/KiWiDatabaseRunner.java
@@ -101,16 +101,22 @@ public class KiWiDatabaseRunner extends Suite {
     
     public KiWiDatabaseRunner(Class<?> klass) throws Throwable {
         super(klass, Collections.<Runner>emptyList());
-        
-        ForDialects d = klass.getAnnotation(ForDialects.class);
-        if (d != null) {
+
+        if(System.getProperty("dialect") != null) {
             ArrayList<Class<? extends KiWiDialect>> forDialects = new ArrayList<>();
-            for (Class<? extends KiWiDialect> dialect : d.value()) {
-                forDialects.add(dialect);
-            }
+            forDialects.add((Class<? extends KiWiDialect>) Class.forName(System.getProperty("dialect")));
             this.forDialects = Collections.unmodifiableList(forDialects);
         } else {
-            forDialects = null;
+            ForDialects d = klass.getAnnotation(ForDialects.class);
+            if (d != null) {
+                ArrayList<Class<? extends KiWiDialect>> forDialects = new ArrayList<>();
+                for (Class<? extends KiWiDialect> dialect : d.value()) {
+                    forDialects.add(dialect);
+                }
+                this.forDialects = Collections.unmodifiableList(forDialects);
+            } else {
+                forDialects = null;
+            }
         }
         
         createRunners();


Mime
View raw message