marmotta-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ans...@apache.org
Subject [1/2] git commit: MARMOTTA-265 : Avoid recoding a SPARQL Results JSON parser in SPARQLClient
Date Tue, 25 Jun 2013 02:49:33 GMT
Updated Branches:
  refs/heads/develop e2f157ddf -> e931d8bc1


MARMOTTA-265 : Avoid recoding a SPARQL Results JSON parser in SPARQLClient


Project: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/commit/bb4c15ef
Tree: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/tree/bb4c15ef
Diff: http://git-wip-us.apache.org/repos/asf/incubator-marmotta/diff/bb4c15ef

Branch: refs/heads/develop
Commit: bb4c15efddaf0848ce6bb0163dd4a2c05f729481
Parents: e2f157d
Author: Peter Ansell <p_ansell@yahoo.com>
Authored: Tue Jun 25 12:27:55 2013 +1000
Committer: Peter Ansell <p_ansell@yahoo.com>
Committed: Tue Jun 25 12:27:55 2013 +1000

----------------------------------------------------------------------
 client/marmotta-client-java/pom.xml             |  13 +++
 .../marmotta/client/clients/SPARQLClient.java   | 117 +++++++++++++------
 2 files changed, 94 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-marmotta/blob/bb4c15ef/client/marmotta-client-java/pom.xml
----------------------------------------------------------------------
diff --git a/client/marmotta-client-java/pom.xml b/client/marmotta-client-java/pom.xml
index ba38e9a..b79125f 100644
--- a/client/marmotta-client-java/pom.xml
+++ b/client/marmotta-client-java/pom.xml
@@ -102,9 +102,22 @@
         </dependency>
         <dependency>
         	<groupId>org.openrdf.sesame</groupId>
+        	<artifactId>sesame-rio-api</artifactId>
+        </dependency>
+        <dependency>
+        	<groupId>org.openrdf.sesame</groupId>
         	<artifactId>sesame-rio-rdfjson</artifactId>
         </dependency>
         <dependency>
+        	<groupId>org.openrdf.sesame</groupId>
+        	<artifactId>sesame-queryresultio-api</artifactId>
+        </dependency>
+        <dependency>
+        	<groupId>org.openrdf.sesame</groupId>
+        	<artifactId>sesame-queryresultio-sparqljson</artifactId>
+        	<scope>runtime</scope>
+        </dependency>
+        <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-marmotta/blob/bb4c15ef/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/clients/SPARQLClient.java
----------------------------------------------------------------------
diff --git a/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/clients/SPARQLClient.java b/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/clients/SPARQLClient.java
index fe4c9f1..235f122 100644
--- a/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/clients/SPARQLClient.java
+++ b/client/marmotta-client-java/src/main/java/org/apache/marmotta/client/clients/SPARQLClient.java
@@ -31,13 +31,32 @@ import org.apache.marmotta.client.util.HTTPUtil;
 import org.codehaus.jackson.JsonParser;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.query.Binding;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryResultHandler;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQueryResult;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.resultio.QueryResultIO;
+import org.openrdf.query.resultio.QueryResultParseException;
+import org.openrdf.query.resultio.QueryResultParser;
+import org.openrdf.query.resultio.TupleQueryResultFormat;
+import org.openrdf.query.resultio.TupleQueryResultParser;
+import org.openrdf.query.resultio.UnsupportedQueryResultFormatException;
+import org.openrdf.query.resultio.helpers.QueryResultCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.URLEncoder;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -84,57 +103,50 @@ public class SPARQLClient {
             switch(response.getStatusLine().getStatusCode()) {
                 case 200:
                     log.debug("SPARQL Query {} evaluated successfully",query);
-                    ObjectMapper mapper = new ObjectMapper();
-                    mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
-                    Map<String,Map<String,List<?>>> resultMap =
-                            mapper.readValue(response.getEntity().getContent(),new TypeReference<Map<String,Map<String,List<?>>>>(){});
+                    QueryResultCollector results = new QueryResultCollector();
+                    
+                    parse(response.getEntity().getContent(), TupleQueryResultFormat.JSON, results, ValueFactoryImpl.getInstance());
 
-                    if(resultMap.isEmpty()) {
+                    if(!results.getHandledTuple() || results.getBindingSets().isEmpty()) {
                         return null;
                     } else {
-                        List<?> head = resultMap.get("head").get("vars");
-                        Set<String> fieldNames = new HashSet<String>();
-                        for(Object o : head) {
-                            if(o instanceof String) {
-                                fieldNames.add((String)o);
-                            }
-                        }
+                        List<String> fieldNames = results.getBindingNames();
 
-                        SPARQLResult result = new SPARQLResult(fieldNames);
+                        SPARQLResult result = new SPARQLResult(new LinkedHashSet<String>(fieldNames));
 
-                        List<?> bindings = resultMap.get("results").get("bindings");
-                        for(Object o : bindings) {
-                            if(o instanceof Map) {
-                                Map<String,RDFNode> row = new HashMap<String, RDFNode>();
-                                for(Map.Entry<String,?> entry : ((Map<String,?>)o).entrySet()) {
-                                    Map<String,String> nodeDef = (Map<String,String>) entry.getValue();
+                        //List<?> bindings = resultMap.get("results").get("bindings");
+                        for(BindingSet nextRow : results.getBindingSets()) {
+                            Map<String,RDFNode> row = new HashMap<String, RDFNode>();
+                            
+                            for(String nextBindingName : fieldNames) {
+                                if(nextRow.hasBinding(nextBindingName)) {
+                                    Binding nextBinding = nextRow.getBinding(nextBindingName);
+                                    //Map<String,String> nodeDef = (Map<String,String>) entry.getValue();
+                                    Value nodeDef = nextBinding.getValue();
                                     RDFNode node = null;
-                                    if("uri".equalsIgnoreCase(nodeDef.get("type"))) {
-                                        node = new URI(nodeDef.get("value"));
-                                    } else if("literal".equalsIgnoreCase(nodeDef.get("type")) ||
-                                              "typed-literal".equalsIgnoreCase(nodeDef.get("type"))) {
-                                        String lang = nodeDef.get("xml:lang");
-                                        String datatype = nodeDef.get("datatype");
-
-                                        if(lang != null) {
-                                            node = new Literal(nodeDef.get("value"),lang);
-                                        } else if(datatype != null) {
-                                            node = new Literal(nodeDef.get("value"),new URI(datatype));
+                                    if(nodeDef instanceof org.openrdf.model.URI) {
+                                        node = new URI(nodeDef.stringValue());
+                                    } else if(nodeDef instanceof org.openrdf.model.BNode) {
+                                        node = new BNode(((org.openrdf.model.BNode)nodeDef).getID());
+                                    } else if(nodeDef instanceof org.openrdf.model.Literal) {
+                                        org.openrdf.model.Literal nodeLiteral = (org.openrdf.model.Literal)nodeDef;
+                                        if(nodeLiteral.getLanguage() != null) {
+                                            node = new Literal(nodeLiteral.getLabel(), nodeLiteral.getLanguage());
+                                        } else if(nodeLiteral.getDatatype() != null) {
+                                            node = new Literal(nodeLiteral.getLabel(), new URI(nodeLiteral.getDatatype().stringValue()));
                                         } else {
-                                            node = new Literal(nodeDef.get("value"));
+                                            node = new Literal(nodeLiteral.getLabel());
                                         }
-                                    } else if("bnode".equalsIgnoreCase(nodeDef.get("type"))) {
-                                        node = new BNode(nodeDef.get("value"));
                                     } else {
-                                        log.error("unknown result node type: {}",nodeDef.get("type"));
+                                        log.error("unknown result node type: {}",nodeDef);
                                     }
                                     
                                     if(node != null) {
-                                        row.put(entry.getKey(),node);
+                                        row.put(nextBindingName, node);
                                     }
                                 }
-                                result.add(row);
                             }
+                            result.add(row);
                         }
                         return result;
                     }
@@ -143,12 +155,45 @@ public class SPARQLClient {
                    throw new MarmottaClientException("error evaluating SPARQL Select Query "+query+": "+response.getStatusLine().getStatusCode() + " " + response.getStatusLine().getReasonPhrase());
             }
 
+        } catch(TupleQueryResultHandlerException e) {
+            throw new MarmottaClientException("error evaluating SPARQL Select Query ", e);
+        } catch(QueryResultParseException e) {
+            throw new MarmottaClientException("error evaluating SPARQL Select Query ", e);
+        } catch(UnsupportedQueryResultFormatException e) {
+            throw new MarmottaClientException("error evaluating SPARQL Select Query ", e);
+        } catch(IllegalStateException e) {
+            throw new MarmottaClientException("error evaluating SPARQL Select Query ", e);
+        } catch(QueryResultHandlerException e) {
+            throw new MarmottaClientException("error evaluating SPARQL Select Query ", e);
         } finally {
             get.releaseConnection();
         }
     }
 
     /**
+     * FIXME: Replace this with QueryResultIO.parse after Sesame-2.7.3.
+     * 
+     * @param in
+     * @param format
+     * @param handler
+     * @param valueFactory
+     * @throws IOException
+     * @throws QueryResultParseException
+     * @throws TupleQueryResultHandlerException
+     * @throws UnsupportedQueryResultFormatException
+     */
+    private static void parse(InputStream in, TupleQueryResultFormat format, QueryResultHandler handler,
+            ValueFactory valueFactory)
+        throws IOException, QueryResultParseException, QueryResultHandlerException,
+        UnsupportedQueryResultFormatException
+    {
+        QueryResultParser parser = QueryResultIO.createParser(format);
+        parser.setValueFactory(valueFactory);
+        parser.setQueryResultHandler(handler);
+        parser.parseQueryResult(in);
+    }
+    
+    /**
     * Carry out a SPARQL ASK Query and return either true or false, depending on the query result.
      *
      * @param askQuery


Mime
View raw message