hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shiv...@apache.org
Subject incubator-hawq git commit: HAWQ-459. Enhance Metadata API to support multiple table
Date Thu, 24 Mar 2016 22:33:31 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 1d1c85460 -> 857797733


HAWQ-459. Enhance Metadata API to support multiple table


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/85779773
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/85779773
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/85779773

Branch: refs/heads/master
Commit: 8577977334311c1222d2d77b90a2fd67473fea68
Parents: 1d1c854
Author: Shivram Mani <shivram.mani@gmail.com>
Authored: Thu Mar 24 15:33:14 2016 -0700
Committer: Shivram Mani <shivram.mani@gmail.com>
Committed: Thu Mar 24 15:33:14 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/hawq/pxf/api/Metadata.java  |  63 +++----
 .../apache/hawq/pxf/api/MetadataFetcher.java    |  24 ++-
 .../hawq/pxf/api/utilities/InputData.java       |  12 ++
 .../pxf/plugins/hive/HiveDataFragmenter.java    |  27 ++-
 .../pxf/plugins/hive/HiveMetadataFetcher.java   |  32 +++-
 .../plugins/hive/utilities/HiveUtilities.java   |  78 +++++++--
 .../plugins/hive/HiveMetadataFetcherTest.java   |  24 +--
 .../hive/utilities/HiveUtilitiesTest.java       |  16 +-
 .../pxf/service/MetadataFetcherFactory.java     |   6 +-
 .../pxf/service/MetadataResponseFormatter.java  |  85 ++++++----
 .../hawq/pxf/service/rest/MetadataResource.java |  52 +++---
 .../pxf/service/utilities/ProtocolData.java     |  19 +++
 .../src/main/resources/pxf-profiles-default.xml |   4 +-
 .../service/MetadataResponseFormatterTest.java  | 166 ++++++++++++++-----
 14 files changed, 408 insertions(+), 200 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index c00e08a..4fc510d 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -26,51 +26,52 @@ import java.util.List;
 import org.apache.commons.lang.StringUtils;
 
 /**
- * Metadata holds a table's metadata information.
- * {@link MetadataFetcher#getTableMetadata} returns the table's metadata.
+ * Metadata holds an item's metadata information.
+ * {@link MetadataFetcher#getMetadata} returns the item's metadata.
  */
 public class Metadata {
 
     /**
-     * Class representing table name - db (schema) name and table name.
+     * Class representing item name - db/schema/path name and table/file name.
      */
-    public static class Table {
-        private String dbName;
-        private String tableName;
+    public static class Item {
+        private String path;
+        private String name;
 
-        public Table(String dbName, String tableName) {
+        public Item(String path, String itemName) {
 
-            if (StringUtils.isBlank(dbName) || StringUtils.isBlank(tableName)) {
-                throw new IllegalArgumentException("Table name cannot be empty");
+            if (StringUtils.isBlank(path) || StringUtils.isBlank(itemName)) {
+                throw new IllegalArgumentException("Item or path name cannot be empty");
             }
 
-            this.dbName = dbName;
-            this.tableName = tableName;
+            this.path = path;
+            this.name = itemName;
         }
 
-        public String getDbName() {
-            return dbName;
+        public String getPath() {
+            return path;
         }
 
-        public String getTableName() {
-            return tableName;
+        public String getName() {
+            return name;
         }
 
         /**
-         * Returns full table name in the form db_name.table_name
+         * Returns full item name in the form path.name
+         * eg: dbname.tblname
          */
         @Override
         public String toString() {
-            return dbName + "." + tableName;
+            return path + "." + name;
         }
     }
 
     /**
-     * Class representing table field - name and type.
+     * Class representing item field - name and type.
      */
     public static class Field {
         private String name;
-        private String type; // TODO: nhorn - 06-03-15 - change to enum
+        private String type; // TODO: change to enum
         private String[] modifiers; // type modifiers, optional field
 
         public Field(String name, String type) {
@@ -102,33 +103,33 @@ public class Metadata {
     }
 
     /**
-     * Table name
+     * Item name
      */
-    private Metadata.Table table;
+    private Item item;
 
     /**
-     * Table's fields
+     * Item's fields
      */
     private List<Metadata.Field> fields;
 
     /**
-     * Constructs a table's Metadata.
+     * Constructs an item's Metadata.
      *
-     * @param tableName the table name
-     * @param fields the table's fields
+     * @param itemName the item name
+     * @param fields the item's fields
      */
-    public Metadata(Metadata.Table tableName,
+    public Metadata(Item itemName,
             List<Metadata.Field> fields) {
-        this.table = tableName;
+        this.item = itemName;
         this.fields = fields;
     }
 
-    public Metadata(Metadata.Table tableName) {
-        this(tableName, new ArrayList<Metadata.Field>());
+    public Metadata(Item itemName) {
+        this(itemName, new ArrayList<Metadata.Field>());
     }
 
-    public Metadata.Table getTable() {
-        return table;
+    public Item getItem() {
+        return item;
     }
 
     public List<Metadata.Field> getFields() {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
index 5f449e6..21aea72 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
@@ -19,27 +19,33 @@ package org.apache.hawq.pxf.api;
  * under the License.
  */
 
+import java.util.List;
+
+import org.apache.hawq.pxf.api.utilities.InputData;
+import org.apache.hawq.pxf.api.utilities.Plugin;
+
 
 /**
- * Abstract class that defines getting metadata of a table.
+ * Abstract class that defines getting metadata.
  */
-public abstract class MetadataFetcher {
-    protected Metadata metadata;
+public abstract class MetadataFetcher extends Plugin {
+    protected List<Metadata> metadata;
 
     /**
      * Constructs a MetadataFetcher.
      *
+     * @param metaData the input data
      */
-    public MetadataFetcher() {
-
+    public MetadataFetcher(InputData metaData) {
+        super(metaData);
     }
 
     /**
-     * Gets a metadata of a given table
+     * Gets a metadata of a given item
      *
-     * @param tableName table name
-     * @return metadata of given table
+     * @param pattern table/file name or pattern
+     * @return metadata of all items corresponding to given pattern
      * @throws Exception if metadata information could not be retrieved
      */
-    public abstract Metadata getTableMetadata(String tableName) throws Exception;
+    public abstract List<Metadata> getMetadata(String pattern) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/InputData.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/InputData.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/InputData.java
index cbec312..891dba8 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/InputData.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/InputData.java
@@ -46,6 +46,7 @@ public class InputData {
     protected String accessor;
     protected String resolver;
     protected String fragmenter;
+    protected String metadata;
     protected String remoteLogin;
     protected String remoteSecret;
     protected int dataFragment; /* should be deprecated */
@@ -96,6 +97,7 @@ public class InputData {
         this.accessor = copy.accessor;
         this.resolver = copy.resolver;
         this.fragmenter = copy.fragmenter;
+        this.metadata = copy.metadata;
         this.remoteLogin = copy.remoteLogin;
         this.remoteSecret = copy.remoteSecret;
         this.threadSafe = copy.threadSafe;
@@ -272,6 +274,16 @@ public class InputData {
     }
 
     /**
+     * Returns the ClassName for the java class that was defined as Metadata
+     * or null if no metadata was defined.
+     *
+     * @return class name for METADATA or null
+     */
+    public String getMetadata() {
+        return metadata;
+    }
+
+    /**
      * Returns the contents of pxf_remote_service_login set in Hawq. Should the
      * user set it to an empty string this function will return null.
      *

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index af1a666..ded9627 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -141,7 +141,7 @@ public class HiveDataFragmenter extends Fragmenter {
 
     @Override
     public List<Fragment> getFragments() throws Exception {
-        Metadata.Table tblDesc = HiveUtilities.parseTableQualifiedName(inputData.getDataSource());
+        Metadata.Item tblDesc = HiveUtilities.extractTableFromName(inputData.getDataSource());
 
         fetchTableMetaData(tblDesc);
 
@@ -175,7 +175,7 @@ public class HiveDataFragmenter extends Fragmenter {
      * Goes over the table partitions metadata and extracts the splits and the
      * InputFormat and Serde per split.
      */
-    private void fetchTableMetaData(Metadata.Table tblDesc) throws Exception {
+    private void fetchTableMetaData(Metadata.Item tblDesc) throws Exception {
 
         Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
 
@@ -210,15 +210,15 @@ public class HiveDataFragmenter extends Fragmenter {
             // API call to Hive Metastore, will return a List of all the
             // partitions for this table, that matches the partition filters
             // Defined in filterStringForHive.
-            partitions = client.listPartitionsByFilter(tblDesc.getDbName(),
-                    tblDesc.getTableName(), filterStringForHive, ALL_PARTS);
+            partitions = client.listPartitionsByFilter(tblDesc.getPath(),
+                    tblDesc.getName(), filterStringForHive, ALL_PARTS);
 
             // No matched partitions for the filter, no fragments to return.
             if (partitions == null || partitions.isEmpty()) {
 
                 if (LOG.isDebugEnabled()) {
-                    LOG.debug("Table -  " + tblDesc.getDbName() + "."
-                            + tblDesc.getTableName()
+                    LOG.debug("Table -  " + tblDesc.getPath() + "."
+                            + tblDesc.getName()
                             + " Has no matched partitions for the filter : "
                             + filterStringForHive);
                 }
@@ -226,16 +226,16 @@ public class HiveDataFragmenter extends Fragmenter {
             }
 
             if (LOG.isDebugEnabled()) {
-                LOG.debug("Table -  " + tblDesc.getDbName() + "."
-                        + tblDesc.getTableName()
+                LOG.debug("Table -  " + tblDesc.getPath() + "."
+                        + tblDesc.getName()
                         + " Matched partitions list size: " + partitions.size());
             }
 
         } else {
             // API call to Hive Metastore, will return a List of all the
             // partitions for this table (no filtering)
-            partitions = client.listPartitions(tblDesc.getDbName(),
-                    tblDesc.getTableName(), ALL_PARTS);
+            partitions = client.listPartitions(tblDesc.getPath(),
+                    tblDesc.getName(), ALL_PARTS);
         }
 
         StorageDescriptor descTable = tbl.getSd();
@@ -250,12 +250,11 @@ public class HiveDataFragmenter extends Fragmenter {
             for (Partition partition : partitions) {
                 StorageDescriptor descPartition = partition.getSd();
                 props = MetaStoreUtils.getSchema(descPartition, descTable,
-                        null, // Map<string, string> parameters - can be empty
-                        tblDesc.getDbName(), tblDesc.getTableName(), // table
-                                                                     // name
+                        null,
+                        tblDesc.getPath(), tblDesc.getName(),
                         partitionKeys);
                 fetchMetaDataForPartitionedTable(descPartition, props,
-                        partition, partitionKeys, tblDesc.getTableName());
+                        partition, partitionKeys, tblDesc.getName());
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
index f1aab21..d228ec5 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.MetadataFetcher;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
+import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 
 /**
@@ -41,24 +43,33 @@ public class HiveMetadataFetcher extends MetadataFetcher {
     private static final Log LOG = LogFactory.getLog(HiveMetadataFetcher.class);
     private HiveMetaStoreClient client;
 
-    public HiveMetadataFetcher() {
-        super();
+    public HiveMetadataFetcher(InputData md) {
+        super(md);
 
         // init hive metastore client connection.
         client = HiveUtilities.initHiveClient();
     }
 
     @Override
-    public Metadata getTableMetadata(String tableName) throws Exception {
+    public List<Metadata> getMetadata(String pattern) throws Exception {
 
-        Metadata.Table tblDesc = HiveUtilities.parseTableQualifiedName(tableName);
-        Metadata metadata = new Metadata(tblDesc);
+        List<Metadata.Item> tblsDesc = HiveUtilities.extractTablesFromPattern(client, pattern);
 
-        Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
+        if(tblsDesc == null || tblsDesc.isEmpty()) {
+            LOG.warn("No tables found for the given pattern: " + pattern);
+            return null;
+        }
+
+        List<Metadata> metadataList = new ArrayList<Metadata>();
 
-        getSchema(tbl, metadata);
+        for(Metadata.Item tblDesc: tblsDesc) {
+            Metadata metadata = new Metadata(tblDesc);
+            Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
+            getSchema(tbl, metadata);
+            metadataList.add(metadata);
+        }
 
-        return metadata;
+        return metadataList;
     }
 
 
@@ -66,6 +77,9 @@ public class HiveMetadataFetcher extends MetadataFetcher {
      * Populates the given metadata object with the given table's fields and partitions,
      * The partition fields are added at the end of the table schema.
      * Throws an exception if the table contains unsupported field types.
+     * Supported HCatalog types: TINYINT,
+     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP,
+     * DATE, DECIMAL, VARCHAR, CHAR.
      *
      * @param tbl Hive table
      * @param metadata schema of given table
@@ -91,7 +105,7 @@ public class HiveMetadataFetcher extends MetadataFetcher {
                 metadata.addField(HiveUtilities.mapHiveType(hivePart));
             }
         } catch (UnsupportedTypeException e) {
-            String errorMsg = "Failed to retrieve metadata for table " + metadata.getTable() + ". " +
+            String errorMsg = "Failed to retrieve metadata for table " + metadata.getItem() + ". " +
                     e.getMessage();
             throw new UnsupportedTypeException(errorMsg);
         }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 651e0fe..7dfe410 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
  */
 
 
+import java.util.List;
 import java.util.ArrayList;
 
 import org.apache.commons.lang.StringUtils;
@@ -42,6 +43,7 @@ import org.apache.hawq.pxf.api.UnsupportedTypeException;
 public class HiveUtilities {
 
     private static final Log LOG = LogFactory.getLog(HiveUtilities.class);
+    private static final String WILDCARD = "*";
 
     /**
      * Default Hive DB (schema) name.
@@ -64,13 +66,13 @@ public class HiveUtilities {
         return client;
     }
 
-    public static Table getHiveTable(HiveMetaStoreClient client, Metadata.Table tableName)
+    public static Table getHiveTable(HiveMetaStoreClient client, Metadata.Item itemName)
             throws Exception {
-        Table tbl = client.getTable(tableName.getDbName(), tableName.getTableName());
+        Table tbl = client.getTable(itemName.getPath(), itemName.getName());
         String tblType = tbl.getTableType();
 
         if (LOG.isDebugEnabled()) {
-            LOG.debug("Table: " + tableName.getDbName() + "." + tableName.getTableName() + ", type: " + tblType);
+            LOG.debug("Item: " + itemName.getPath() + "." + itemName.getName() + ", type: " + tblType);
         }
 
         if (TableType.valueOf(tblType) == TableType.VIRTUAL_VIEW) {
@@ -203,19 +205,37 @@ public class HiveUtilities {
      * It can be either <code>table_name</code> or <code>db_name.table_name</code>.
      *
      * @param qualifiedName Hive table name
-     * @return {@link org.apache.hawq.pxf.api.Metadata.Table} object holding the full table name
+     * @return {@link Metadata.Item} object holding the full table name
      */
-    public static Metadata.Table parseTableQualifiedName(String qualifiedName) {
+    public static Metadata.Item extractTableFromName(String qualifiedName) {
+        List<Metadata.Item> items = extractTablesFromPattern(null, qualifiedName);
+        if(items.isEmpty()) {
+            throw new IllegalArgumentException("No tables found");
+        }
+        return items.get(0);
+    }
+
+    /**
+     * Extracts the db_name(s) and table_name(s) corresponding to the given pattern.
+     * pattern is the Hive table name or pattern that the user enters in the CREATE EXTERNAL TABLE statement
+     * or when querying HCatalog table.
+     * It can be either <code>table_name_pattern</code> or <code>db_name_pattern.table_name_pattern</code>.
+     *
+     * @param client Hivemetastore client
+     * @param pattern Hive table name or pattern
+     * @return list of {@link Metadata.Item} objects holding the full table name
+     */
+    public static List<Metadata.Item> extractTablesFromPattern(HiveMetaStoreClient client, String pattern) {
 
-        String dbName, tableName;
+        String dbPattern, tablePattern;
         String errorMsg = " is not a valid Hive table name. "
                 + "Should be either <table_name> or <db_name.table_name>";
 
-        if (StringUtils.isBlank(qualifiedName)) {
+        if (StringUtils.isBlank(pattern)) {
             throw new IllegalArgumentException("empty string" + errorMsg);
         }
 
-        String[] rawToks = qualifiedName.split("[.]");
+        String[] rawToks = pattern.split("[.]");
         ArrayList<String> toks = new ArrayList<String>();
         for (String tok: rawToks) {
             if (StringUtils.isBlank(tok)) {
@@ -225,15 +245,45 @@ public class HiveUtilities {
         }
 
         if (toks.size() == 1) {
-            dbName = HIVE_DEFAULT_DBNAME;
-            tableName = toks.get(0);
+            dbPattern = HIVE_DEFAULT_DBNAME;
+            tablePattern = toks.get(0);
         } else if (toks.size() == 2) {
-            dbName = toks.get(0);
-            tableName = toks.get(1);
+            dbPattern = toks.get(0);
+            tablePattern = toks.get(1);
         } else {
-            throw new IllegalArgumentException("\"" + qualifiedName + "\"" + errorMsg);
+            throw new IllegalArgumentException("\"" + pattern + "\"" + errorMsg);
         }
 
-        return new Metadata.Table(dbName, tableName);
+        return getTablesFromPattern(client, dbPattern, tablePattern);
+   }
+
+    private static List<Metadata.Item> getTablesFromPattern(HiveMetaStoreClient client, String dbPattern, String tablePattern) {
+
+        List<String> databases = null;
+        List<Metadata.Item> itemList = new ArrayList<Metadata.Item>();
+        List<String> tables = new ArrayList<String>();
+
+        if(client == null || (!dbPattern.contains(WILDCARD) && !tablePattern.contains(WILDCARD)) ) {
+            /* This case occurs when the call is invoked as part of the fragmenter api or when metadata is requested for a specific table name */
+            itemList.add(new Metadata.Item(dbPattern, tablePattern));
+            return itemList;
+        }
+
+        try {
+            databases = client.getDatabases(dbPattern);
+            if(databases.isEmpty()) {
+                LOG.warn("No database found for the given pattern: " + dbPattern);
+                return null;
+            }
+            for(String dbName: databases) {
+                for(String tableName: client.getTables(dbName, tablePattern)) {
+                    itemList.add(new Metadata.Item(dbName, tableName));
+                }
+            }
+            return itemList;
+
+        } catch (MetaException cause) {
+            throw new RuntimeException("Failed connecting to Hive MetaStore service: " + cause.getMessage(), cause);
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
index c300214..4ddb486 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hawq.pxf.api.utilities.InputData;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -50,12 +51,12 @@ import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 @SuppressStaticInitializationFor({"org.apache.hadoop.hive.metastore.api.MetaException",
 "org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities"}) // Prevents static inits
 public class HiveMetadataFetcherTest {
-
+    InputData inputData;
     Log LOG;
     HiveConf hiveConfiguration;
     HiveMetaStoreClient hiveClient;
     HiveMetadataFetcher fetcher;
-    Metadata metadata;
+    List<Metadata> metadataList;
 
     @Before
     public void SetupCompressionFactory() {
@@ -66,7 +67,7 @@ public class HiveMetadataFetcherTest {
     @Test
     public void construction() throws Exception {
         prepareConstruction();
-        fetcher = new HiveMetadataFetcher();
+        fetcher = new HiveMetadataFetcher(inputData);
         PowerMockito.verifyNew(HiveMetaStoreClient.class).withArguments(hiveConfiguration);
     }
 
@@ -76,7 +77,7 @@ public class HiveMetadataFetcherTest {
         PowerMockito.whenNew(HiveMetaStoreClient.class).withArguments(hiveConfiguration).thenThrow(new MetaException("which way to albuquerque"));
 
         try {
-            fetcher = new HiveMetadataFetcher();
+            fetcher = new HiveMetadataFetcher(inputData);
             fail("Expected a RuntimeException");
         } catch (RuntimeException ex) {
             assertEquals("Failed connecting to Hive MetaStore service: which way to albuquerque", ex.getMessage());
@@ -86,11 +87,11 @@ public class HiveMetadataFetcherTest {
     @Test
     public void getTableMetadataInvalidTableName() throws Exception {
         prepareConstruction();
-        fetcher = new HiveMetadataFetcher();
+        fetcher = new HiveMetadataFetcher(inputData);
         String tableName = "t.r.o.u.b.l.e.m.a.k.e.r";
 
         try {
-            fetcher.getTableMetadata(tableName);
+            fetcher.getMetadata(tableName);
             fail("Expected an IllegalArgumentException");
         } catch (IllegalArgumentException ex) {
             assertEquals("\"t.r.o.u.b.l.e.m.a.k.e.r\" is not a valid Hive table name. Should be either <table_name> or <db_name.table_name>", ex.getMessage()); 
@@ -101,7 +102,7 @@ public class HiveMetadataFetcherTest {
     public void getTableMetadataView() throws Exception {
         prepareConstruction();
 
-        fetcher = new HiveMetadataFetcher();
+        fetcher = new HiveMetadataFetcher(inputData);
         String tableName = "cause";
 
         // mock hive table returned from hive client
@@ -110,7 +111,7 @@ public class HiveMetadataFetcherTest {
         when(hiveClient.getTable("default", tableName)).thenReturn(hiveTable);
 
         try {
-            metadata = fetcher.getTableMetadata(tableName);
+            metadataList = fetcher.getMetadata(tableName);
             fail("Expected an UnsupportedOperationException because PXF doesn't support views");
         } catch (UnsupportedOperationException e) {
             assertEquals("Hive views are not supported by HAWQ", e.getMessage());
@@ -121,7 +122,7 @@ public class HiveMetadataFetcherTest {
     public void getTableMetadata() throws Exception {
         prepareConstruction();
 
-        fetcher = new HiveMetadataFetcher();
+        fetcher = new HiveMetadataFetcher(inputData);
         String tableName = "cause";
 
         // mock hive table returned from hive client
@@ -137,9 +138,10 @@ public class HiveMetadataFetcherTest {
         when(hiveClient.getTable("default", tableName)).thenReturn(hiveTable);
 
         // get metadata
-        metadata = fetcher.getTableMetadata(tableName);
+        metadataList = fetcher.getMetadata(tableName);
+        Metadata metadata = metadataList.get(0);
 
-        assertEquals("default.cause", metadata.getTable().toString());
+        assertEquals("default.cause", metadata.getItem().toString());
 
         List<Metadata.Field> resultFields = metadata.getFields();
         assertNotNull(resultFields);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 03cca72..466dedb 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -31,7 +31,7 @@ import org.apache.hawq.pxf.api.UnsupportedTypeException;
 public class HiveUtilitiesTest {
 
     FieldSchema hiveColumn;
-    Metadata.Table tblDesc;
+    Metadata.Item tblDesc;
 
     static String[][] typesMappings = {
         /* hive type -> hawq type */
@@ -154,19 +154,19 @@ public class HiveUtilitiesTest {
     @Test
     public void parseTableQualifiedNameNoDbName() throws Exception {
         String name = "orphan";
-        tblDesc = HiveUtilities.parseTableQualifiedName(name);
+        tblDesc = HiveUtilities.extractTableFromName(name);
 
-        assertEquals("default", tblDesc.getDbName());
-        assertEquals(name, tblDesc.getTableName());
+        assertEquals("default", tblDesc.getPath());
+        assertEquals(name, tblDesc.getName());
     }
 
     @Test
     public void parseTableQualifiedName() throws Exception {
         String name = "not.orphan";
-        tblDesc = HiveUtilities.parseTableQualifiedName(name);
+        tblDesc = HiveUtilities.extractTableFromName(name);
 
-        assertEquals("not", tblDesc.getDbName());
-        assertEquals("orphan", tblDesc.getTableName());
+        assertEquals("not", tblDesc.getPath());
+        assertEquals("orphan", tblDesc.getName());
     }
 
     @Test
@@ -206,7 +206,7 @@ public class HiveUtilitiesTest {
 
     private void parseTableQualifiedNameNegative(String name, String errorMsg, String reason) throws Exception {
         try {
-            tblDesc = HiveUtilities.parseTableQualifiedName(name);
+            tblDesc = HiveUtilities.extractTableFromName(name);
             fail("test should fail because of " + reason);
         } catch (IllegalArgumentException e) {
             assertEquals(errorMsg, e.getMessage());

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
index c059f59..396b711 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
@@ -21,6 +21,8 @@ package org.apache.hawq.pxf.service;
 
 
 import org.apache.hawq.pxf.api.MetadataFetcher;
+import org.apache.hawq.pxf.api.utilities.InputData;
+import org.apache.hawq.pxf.api.utilities.Utilities;
 
 /**
  * Factory class for creation of {@link MetadataFetcher} objects. 
@@ -28,7 +30,7 @@ import org.apache.hawq.pxf.api.MetadataFetcher;
  * abstract class which is returned by the MetadataFetcherFactory. 
  */
 public class MetadataFetcherFactory {
-    static public MetadataFetcher create(String fetcherName) throws Exception {
-        return (MetadataFetcher) Class.forName(fetcherName).newInstance();
+    public static MetadataFetcher create(InputData inputData) throws Exception {
+        return (MetadataFetcher) Utilities.createAnyInstance(InputData.class, inputData.getMetadata(), inputData);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index eb83627..a420ca7 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -21,6 +21,8 @@ package org.apache.hawq.pxf.service;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -35,19 +37,17 @@ import org.apache.hawq.pxf.api.Metadata;
 public class MetadataResponseFormatter {
 
     private static final Log LOG = LogFactory.getLog(MetadataResponseFormatter.class);
+    private static final String METADATA_DEFAULT_RESPONSE = "{\"PXFMetadata\":[]}";
 
     /**
-     * Converts {@link Metadata} to JSON String format.
+     * Converts list of {@link Metadata} to JSON String format.
      *
-     * @param metadata metadata to convert
+     * @param metadataList list of metadata objects to convert
      * @return JSON formatted response
      * @throws IOException if converting the data to JSON fails
      */
-    public static String formatResponseString(Metadata metadata) throws IOException {
-        /* print the metadata before serialization */
-        LOG.debug(MetadataResponseFormatter.metadataToString(metadata));
-
-        return MetadataResponseFormatter.metadataToJSON(metadata);
+    public static String formatResponseString(List<Metadata> metadataList) throws IOException {
+        return MetadataResponseFormatter.metadataToJSON(metadataList);
     }
 
     /**
@@ -55,50 +55,65 @@ public class MetadataResponseFormatter {
      * To be used as the result string for HAWQ.
      * An example result is as follows:
      *
-     * {"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}
+     * {"PXFMetadata":[{"item":{"path":"default","name":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}
      */
-    private static String metadataToJSON(Metadata metadata) throws IOException {
+    private static String metadataToJSON(List<Metadata> metadataList) throws IOException {
 
-        if (metadata == null) {
-            throw new IllegalArgumentException("metadata object is null - cannot serialize");
+        if (metadataList == null || metadataList.isEmpty()) {
+               return METADATA_DEFAULT_RESPONSE;
         }
 
-        if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
-            throw new IllegalArgumentException("metadata contains no fields - cannot serialize");
+        StringBuilder result = null;
+
+        for(Metadata metadata: metadataList) {
+            if(metadata == null) {
+                throw new IllegalArgumentException("metadata object is null - cannot serialize");
+            }
+            if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
+                throw new IllegalArgumentException("metadata for " + metadata.getItem() + " contains no fields - cannot serialize");
+            }
+            if (result == null) {
+                result = new StringBuilder("{\"PXFMetadata\":["); /* prefix info */
+            } else {
+                result.append(",");
+            }
+
+            ObjectMapper mapper = new ObjectMapper();
+            mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
+            result.append(mapper.writeValueAsString(metadata));
         }
 
-        ObjectMapper mapper = new ObjectMapper();
-        mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
+        return result.append("]}").toString(); /* append suffix info */
 
-        StringBuilder result = new StringBuilder("{\"PXFMetadata\":");
-        String prefix = "["; // preparation for supporting multiple tables
-        result.append(prefix).append(mapper.writeValueAsString(metadata));
-        return result.append("]}").toString();
     }
 
     /**
-     * Converts metadata to a readable string.
+     * Converts metadata list to a readable string.
      * Intended for debugging purposes only.
      */
-    private static String metadataToString(Metadata metadata) {
-        StringBuilder result = new StringBuilder("Metadata for table \"");
+    private static String metadataToString(List<Metadata> metadataList) {
+        StringBuilder result = new StringBuilder("Metadata:");
 
-        if (metadata == null) {
-            return "No metadata";
-        }
+        for(Metadata metadata: metadataList) {
+            result.append(" Metadata for item \"");
 
-        result.append(metadata.getTable()).append("\": ");
+            if (metadata == null) {
+                return "No metadata";
+            }
 
-        if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
-            result.append("no fields in table");
-            return result.toString();
-        }
+            result.append(metadata.getItem()).append("\": ");
+
+            if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
+                result.append("no fields in item");
+                return result.toString();
+            }
 
-        int i = 0;
-        for (Metadata.Field field: metadata.getFields()) {
-            result.append("Field #").append(++i).append(": [")
-                .append("Name: ").append(field.getName())
-                .append(", Type: ").append(field.getType()).append("] ");
+            int i = 0;
+            for (Metadata.Field field : metadata.getFields()) {
+                result.append("Field #").append(++i).append(": [")
+                        .append("Name: ").append(field.getName())
+                        .append(", Type: ").append(field.getType()).append("] ");
+            }
         }
 
         return result.toString();

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
index 12a1904..e16d14a 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
@@ -20,6 +20,8 @@ package org.apache.hawq.pxf.service.rest;
  */
 
 import java.io.IOException;
+import java.util.List;
+import java.util.Map;
 
 import javax.servlet.ServletContext;
 import javax.ws.rs.GET;
@@ -37,8 +39,11 @@ import org.apache.commons.logging.LogFactory;
 
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.MetadataFetcher;
+import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.service.MetadataFetcherFactory;
 import org.apache.hawq.pxf.service.MetadataResponseFormatter;
+import org.apache.hawq.pxf.service.utilities.ProtocolData;
+import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
 
 /**
  * Class enhances the API of the WEBHDFS REST server. Returns the metadata of a
@@ -56,51 +61,54 @@ public class MetadataResource extends RestResource {
     }
 
     /**
-     * This function queries the HiveMetaStore to get the given table's
-     * metadata: Table name, field names, field types. The types are converted
-     * from HCatalog types to HAWQ types. Supported HCatalog types: TINYINT,
-     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP,
-     * DATE, DECIMAL, VARCHAR, CHAR. <br>
+     * This function queries the underlying store, based on the given profile, for the schema of
+     * items that match the given pattern. The returned metadata includes the item name, field
+     * names and field types. The types are converted from the underlying types to HAWQ types.
      * Unsupported types result in an error. <br>
      * Response Examples:<br>
      * For a table <code>default.t1</code> with 2 fields (a int, b float) will
      * be returned as:
-     * <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
+     * <code>{"PXFMetadata":[{"item":{"path":"default","name":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
      *
      * @param servletContext servlet context
      * @param headers http headers
-     * @param table HCatalog table name
-     * @return JSON formatted response with metadata for given table
-     * @throws Exception if connection to Hcatalog failed, table didn't exist or
+     * @param profile based on this the metadata source can be inferred
+     * @param pattern table/file name or pattern in the given source
+     * @return JSON formatted response with metadata of each item that corresponds to the pattern
+     * @throws Exception if connection to the source/catalog failed, no item exists for the
+     *             pattern, or an item's type or fields are not supported
      */
     @GET
-    @Path("getTableMetadata")
+    @Path("getMetadata")
     @Produces("application/json")
     public Response read(@Context final ServletContext servletContext,
                          @Context final HttpHeaders headers,
-                         @QueryParam("table") final String table)
+                         @QueryParam("profile") final String profile,
+                         @QueryParam("pattern") final String pattern)
             throws Exception {
-        LOG.debug("getTableMetadata started");
+        LOG.debug("getMetadata started");
         String jsonOutput;
         try {
+
+            // Convert headers into a regular map
+            Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
+
+            // Add profile and verify token
+            ProtocolData protData = new ProtocolData(params, profile.toLowerCase());
+
+            // 0. Verify token
+            SecuredHDFS.verifyToken(protData, servletContext);
+
             // 1. start MetadataFetcher
-            MetadataFetcher metadataFetcher = MetadataFetcherFactory.create("org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher"); // TODO:
-                                                                                                                                     // nhorn
-                                                                                                                                     // -
-                                                                                                                                     // 09-03-15
-                                                                                                                                     // -
-                                                                                                                                     // pass
-                                                                                                                                     // as
-                                                                                                                                     // param
+            MetadataFetcher metadataFetcher = MetadataFetcherFactory.create(protData);
 
             // 2. get Metadata
-            Metadata metadata = metadataFetcher.getTableMetadata(table);
+            List<Metadata> metadata = metadataFetcher.getMetadata(pattern);
 
             // 3. serialize to JSON
             jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
 
-            LOG.debug("getTableMetadata output: " + jsonOutput);
+            LOG.debug("getMetadata output: " + jsonOutput);
 
         } catch (ClientAbortException e) {
             LOG.error("Remote connection closed by HAWQ", e);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
index 6ab224a..ec258fa 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
@@ -95,6 +95,7 @@ public class ProtocolData extends InputData {
         accessor = getProperty("ACCESSOR");
         resolver = getProperty("RESOLVER");
         fragmenter = getOptionalProperty("FRAGMENTER");
+        metadata = getOptionalProperty("METADATA");
         dataSource = getProperty("DATA-DIR");
 
         /* Kerberos token information */
@@ -150,6 +151,24 @@ public class ProtocolData extends InputData {
     }
 
     /**
+     * Constructs a ProtocolData. Parses X-GP-* configuration variables.
+     *
+     * @param paramsMap contains all query-specific parameters from Hawq
+     * @param profileString contains the profile name
+     */
+    public ProtocolData(Map<String, String> paramsMap, String profileString) {
+        requestParametersMap = paramsMap;
+        profile = profileString;
+        setProfilePlugins();
+        metadata = getProperty("METADATA");
+
+        /* Kerberos token information */
+        if (UserGroupInformation.isSecurityEnabled()) {
+            token = getProperty("TOKEN");
+        }
+    }
+
+    /**
      * Sets the requested profile plugins from profile file into
      * {@link #requestParametersMap}.
      */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
index 5b57d21..d908935 100644
--- a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
+++ b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
@@ -49,6 +49,7 @@ under the License.
             <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveDataFragmenter</fragmenter>
             <accessor>org.apache.hawq.pxf.plugins.hive.HiveAccessor</accessor>
             <resolver>org.apache.hawq.pxf.plugins.hive.HiveResolver</resolver>
+            <metadata>org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher</metadata>
         </plugins>
     </profile>
     <profile>
@@ -62,6 +63,7 @@ under the License.
             <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
             <accessor>org.apache.hawq.pxf.plugins.hive.HiveRCFileAccessor</accessor>
             <resolver>org.apache.hawq.pxf.plugins.hive.HiveColumnarSerdeResolver</resolver>
+            <metadata>org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher</metadata>
         </plugins>
     </profile>
     <profile>
@@ -74,6 +76,7 @@ under the License.
             <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
             <accessor>org.apache.hawq.pxf.plugins.hive.HiveLineBreakAccessor</accessor>
             <resolver>org.apache.hawq.pxf.plugins.hive.HiveStringPassResolver</resolver>
+            <metadata>org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher</metadata>
         </plugins>
     </profile>
     <profile>
@@ -111,7 +114,6 @@ under the License.
         <name>SequenceWritable</name>
         <description>
             Profile for accessing Sequence files serialized with a custom Writable class
-            usage: pxf://nn:50070/path/to/file?profile=SequenceWritable&amp;data-schema=CustomClass
         </description>
         <plugins>
             <fragmenter>org.apache.hawq.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85779773/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index a47952a..b4583aa 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -25,6 +25,8 @@ import static org.junit.Assert.*;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.lang.StringUtils;
+import org.apache.directory.shared.kerberos.components.MethodData;
 import org.junit.Test;
 
 import org.apache.hawq.pxf.api.Metadata;
@@ -35,92 +37,168 @@ public class MetadataResponseFormatterTest {
 
     @Test
     public void formatResponseString() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int"));
         fields.add(new Metadata.Field("field2", "text"));
+        metadataList.add(metadata);
 
-        result = MetadataResponseFormatter.formatResponseString(metadata);
-        String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
-                + "\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}";
+        result = MetadataResponseFormatter.formatResponseString(metadataList);
+        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
 
-        assertEquals(expected, result);
+        assertEquals(expected.toString(), result);
     }
 
     @Test
     public void formatResponseStringWithNullModifier() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int", null));
         fields.add(new Metadata.Field("field2", "text", new String[] {}));
+        metadataList.add(metadata);
 
-        result = MetadataResponseFormatter.formatResponseString(metadata);
-        String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
-                + "\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}";
+        result = MetadataResponseFormatter.formatResponseString(metadataList);
+        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
 
-        assertEquals(expected, result);
+        assertEquals(expected.toString(), result);
     }
 
     @Test
     public void formatResponseStringWithModifiers() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int"));
         fields.add(new Metadata.Field("field2", "numeric",
                 new String[] {"1349", "1789"}));
         fields.add(new Metadata.Field("field3", "char",
                 new String[] {"50"}));
-
-        result = MetadataResponseFormatter.formatResponseString(metadata);
-        String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
-                + "\"fields\":["
-                + "{\"name\":\"field1\",\"type\":\"int\"},"
-                + "{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},"
-                + "{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}"
-                + "]}]}";
-
-        assertEquals(expected, result);
+        metadataList.add(metadata);
+
+        result = MetadataResponseFormatter.formatResponseString(metadataList);
+        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+                .append("\"fields\":[")
+                .append("{\"name\":\"field1\",\"type\":\"int\"},")
+                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
+                .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
+                .append("]}]}");
+
+        assertEquals(expected.toString(), result);
     }
 
     @Test
     public void formatResponseStringNull() throws Exception {
-        Metadata metadata = null;
-
-        try {
-            result = MetadataResponseFormatter.formatResponseString(metadata);
-            fail("formatting should fail because metadata is null");
-        } catch (IllegalArgumentException e) {
-            assertEquals("metadata object is null - cannot serialize", e.getMessage());
-        }
+        List<Metadata> metadataList = null;
+        result = MetadataResponseFormatter.formatResponseString(metadataList);
+        String expected = new String("{\"PXFMetadata\":[]}");
+        assertEquals(expected, result);
     }
 
     @Test
     public void formatResponseStringNoFields() throws Exception {
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, null);
-
+        List<Metadata> metadataList = new ArrayList<Metadata>();
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, null);
+        metadataList.add(metadata);
         try {
-            result = MetadataResponseFormatter.formatResponseString(metadata);
+            result = MetadataResponseFormatter.formatResponseString(metadataList);
             fail("formatting should fail because fields field is null");
         } catch (IllegalArgumentException e) {
-            assertEquals("metadata contains no fields - cannot serialize", e.getMessage());
+            assertEquals("metadata for " + metadata.getItem() + " contains no fields - cannot serialize", e.getMessage());
         }
 
         ArrayList<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        metadata = new Metadata(tableName, fields);
-
+        metadataList = new ArrayList<Metadata>();
+        metadata = new Metadata(itemName, fields);
+        metadataList.add(metadata);
         try {
-            result = MetadataResponseFormatter.formatResponseString(metadata);
+            result = MetadataResponseFormatter.formatResponseString(metadataList);
             fail("formatting should fail because there are no fields");
         } catch (IllegalArgumentException e) {
-            assertEquals("metadata contains no fields - cannot serialize", e.getMessage());
+            assertEquals("metadata for " + metadata.getItem() + " contains no fields - cannot serialize", e.getMessage());
+        }
+    }
+
+    @Test
+    public void formatResponseStringPartialNull() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
+        List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
+        fields.add(new Metadata.Field("field1", "int"));
+        metadataList.add(null);
+        metadataList.add(metadata);
+        try {
+            result = MetadataResponseFormatter.formatResponseString(metadataList);
+            fail("formatting should fail because one of the metdata object is null");
+        } catch (IllegalArgumentException e) {
+            assertEquals("metadata object is null - cannot serialize", e.getMessage());
+        }
+    }
+
+    @Test
+    public void formatResponseStringWithMultipleItems() throws Exception {
+        List <Metadata> metdataList = new ArrayList<Metadata>();
+        for (int i=1; i<=10; i++) {
+            List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+            Metadata.Item itemName = new Metadata.Item("default", "table"+i);
+            Metadata metadata = new Metadata(itemName, fields);
+            fields.add(new Metadata.Field("field1", "int"));
+            fields.add(new Metadata.Field("field2", "text"));
+            metdataList.add(metadata);
         }
+        result = MetadataResponseFormatter.formatResponseString(metdataList);
+
+        StringBuilder expected = new StringBuilder();
+        for (int i=1; i<=10; i++) {
+            if(i==1) {
+                expected.append("{\"PXFMetadata\":[");
+            } else {
+                expected.append(",");
+            }
+            expected.append("{\"item\":{\"path\":\"default\",\"name\":\"table").append(i).append("\"},");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+        }
+        expected.append("]}");
+
+        assertEquals(expected.toString(), result);
+    }
+
+    @Test
+    public void formatResponseStringWithMultiplePathsAndItems() throws Exception {
+        List <Metadata> metdataList = new ArrayList<Metadata>();
+        for (int i=1; i<=10; i++) {
+            List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+            Metadata.Item itemName = new Metadata.Item("default"+i, "table"+i);
+            Metadata metadata = new Metadata(itemName, fields);
+            fields.add(new Metadata.Field("field1", "int"));
+            fields.add(new Metadata.Field("field2", "text"));
+            metdataList.add(metadata);
+        }
+        result = MetadataResponseFormatter.formatResponseString(metdataList);
+        StringBuilder expected = new StringBuilder();
+        for (int i=1; i<=10; i++) {
+            if(i==1) {
+                expected.append("{\"PXFMetadata\":[");
+            } else {
+                expected.append(",");
+            }
+            expected.append("{\"item\":{\"path\":\"default").append(i).append("\",\"name\":\"table").append(i).append("\"},");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+        }
+        expected.append("]}");
+
+        assertEquals(expected.toString(), result);
     }
 }
 


Mime
View raw message