hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shiv...@apache.org
Subject incubator-hawq git commit: HAWQ-459. Enhance Metadata Rest API to support multiple tables
Date Tue, 15 Mar 2016 23:15:54 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-459 [created] 854f1aa43


HAWQ-459. Enhance Metadata Rest API to support multiple tables


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/854f1aa4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/854f1aa4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/854f1aa4

Branch: refs/heads/HAWQ-459
Commit: 854f1aa43dc478fd3ff68115bcc49705b5848600
Parents: 675465d
Author: Shivram Mani <shivram.mani@gmail.com>
Authored: Tue Mar 15 16:15:45 2016 -0700
Committer: Shivram Mani <shivram.mani@gmail.com>
Committed: Tue Mar 15 16:15:45 2016 -0700

----------------------------------------------------------------------
 pxf/README.md                                   |  2 +-
 .../java/org/apache/hawq/pxf/api/Metadata.java  | 63 +++++++-------
 .../apache/hawq/pxf/api/MetadataFetcher.java    | 12 +--
 .../pxf/plugins/hive/HiveDataFragmenter.java    | 24 ++---
 .../pxf/plugins/hive/HiveMetadataFetcher.java   | 25 ++++--
 .../plugins/hive/utilities/HiveUtilities.java   | 89 ++++++++++++++++---
 .../plugins/hive/HiveMetadataFetcherTest.java   | 11 +--
 .../hive/utilities/HiveUtilitiesTest.java       | 16 ++--
 .../pxf/service/MetadataFetcherFactory.java     | 11 ++-
 .../pxf/service/MetadataResponseFormatter.java  | 92 +++++++++++++-------
 .../hawq/pxf/service/rest/MetadataResource.java | 38 ++++----
 .../src/main/resources/pxf-profiles-default.xml |  4 +-
 .../service/MetadataResponseFormatterTest.java  | 24 ++---
 pxf/src/scripts/pxf_manual_failover.py          |  2 +-
 14 files changed, 258 insertions(+), 155 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/README.md
----------------------------------------------------------------------
diff --git a/pxf/README.md b/pxf/README.md
index b7cd7f0..c8e7a3f 100644
--- a/pxf/README.md
+++ b/pxf/README.md
@@ -12,7 +12,7 @@ Introduction
 ============
 
 PXF is an extensible framework that allows HAWQ to query external system data.
-PXF includes built-in connectors for accessing data that exists inside HDFS files, Hive tables,
HBase tables and more.
+PXF includes built-in connectors for accessing data that exists inside HDFS files, Hive items,
HBase items and more.
 Users can also create their own connectors to other parallel data stores or processing engines.
 To create these connectors using JAVA plugins, see the Pivotal Extension Framework API and
Reference Guide .
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index c00e08a..7a044d6 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -26,51 +26,52 @@ import java.util.List;
 import org.apache.commons.lang.StringUtils;
 
 /**
- * Metadata holds a table's metadata information.
- * {@link MetadataFetcher#getTableMetadata} returns the table's metadata.
+ * Metadata holds an item's metadata information.
+ * {@link MetadataFetcher#getMetadata} returns the item's metadata.
  */
 public class Metadata {
 
     /**
-     * Class representing table name - db (schema) name and table name.
+     * Class representing item name - db (schema) name and item name.
      */
-    public static class Table {
-        private String dbName;
-        private String tableName;
+    public static class Item {
+        private String path;
+        private String name;
 
-        public Table(String dbName, String tableName) {
+        public Item(String path, String itemName) {
 
-            if (StringUtils.isBlank(dbName) || StringUtils.isBlank(tableName)) {
-                throw new IllegalArgumentException("Table name cannot be empty");
+            if (StringUtils.isBlank(path) || StringUtils.isBlank(itemName)) {
+                throw new IllegalArgumentException("Item name cannot be empty");
             }
 
-            this.dbName = dbName;
-            this.tableName = tableName;
+            this.path = path;
+            this.name = itemName;
         }
 
-        public String getDbName() {
-            return dbName;
+        public String getPath() {
+            return path;
         }
 
-        public String getTableName() {
-            return tableName;
+        public String getName() {
+            return name;
         }
 
         /**
-         * Returns full table name in the form db_name.table_name
+         * Returns full item name in the form path.name
+         * e.g. dbname.tblname
          */
         @Override
         public String toString() {
-            return dbName + "." + tableName;
+            return path + "." + name;
         }
     }
 
     /**
-     * Class representing table field - name and type.
+     * Class representing item field - name and type.
      */
     public static class Field {
         private String name;
-        private String type; // TODO: nhorn - 06-03-15 - change to enum
+        private String type; // TODO: change to enum
         private String[] modifiers; // type modifiers, optional field
 
         public Field(String name, String type) {
@@ -102,33 +103,33 @@ public class Metadata {
     }
 
     /**
-     * Table name
+     * Item name
      */
-    private Metadata.Table table;
+    private Item item;
 
     /**
-     * Table's fields
+     * Item's fields
      */
     private List<Metadata.Field> fields;
 
     /**
-     * Constructs a table's Metadata.
+     * Constructs an item's Metadata.
      *
-     * @param tableName the table name
-     * @param fields the table's fields
+     * @param itemName the item name
+     * @param fields the item's fields
      */
-    public Metadata(Metadata.Table tableName,
+    public Metadata(Item itemName,
             List<Metadata.Field> fields) {
-        this.table = tableName;
+        this.item = itemName;
         this.fields = fields;
     }
 
-    public Metadata(Metadata.Table tableName) {
-        this(tableName, new ArrayList<Metadata.Field>());
+    public Metadata(Item itemName) {
+        this(itemName, new ArrayList<Metadata.Field>());
     }
 
-    public Metadata.Table getTable() {
-        return table;
+    public Item getItem() {
+        return item;
     }
 
     public List<Metadata.Field> getFields() {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
index 5f449e6..1808774 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/MetadataFetcher.java
@@ -1,5 +1,7 @@
 package org.apache.hawq.pxf.api;
 
+import java.util.List;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -21,10 +23,10 @@ package org.apache.hawq.pxf.api;
 
 
 /**
- * Abstract class that defines getting metadata of a table.
+ * Abstract class that defines getting metadata of an item.
  */
 public abstract class MetadataFetcher {
-    protected Metadata metadata;
+    protected List<Metadata> metadata;
 
     /**
      * Constructs a MetadataFetcher.
@@ -37,9 +39,9 @@ public abstract class MetadataFetcher {
     /**
      * Gets a metadata of a given table
      *
-     * @param tableName table name
-     * @return metadata of given table
+     * @param itemName table/file name or pattern
+     * @return metadata of given item(s)
      * @throws Exception if metadata information could not be retrieved
      */
-    public abstract Metadata getTableMetadata(String tableName) throws Exception;
+    public abstract List<Metadata> getMetadata(String itemName) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index af1a666..f8e4e08 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -141,7 +141,7 @@ public class HiveDataFragmenter extends Fragmenter {
 
     @Override
     public List<Fragment> getFragments() throws Exception {
-        Metadata.Table tblDesc = HiveUtilities.parseTableQualifiedName(inputData.getDataSource());
+        Metadata.Item tblDesc = HiveUtilities.extractTableFromName(inputData.getDataSource());
 
         fetchTableMetaData(tblDesc);
 
@@ -175,7 +175,7 @@ public class HiveDataFragmenter extends Fragmenter {
      * Goes over the table partitions metadata and extracts the splits and the
      * InputFormat and Serde per split.
      */
-    private void fetchTableMetaData(Metadata.Table tblDesc) throws Exception {
+    private void fetchTableMetaData(Metadata.Item tblDesc) throws Exception {
 
         Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
 
@@ -210,15 +210,15 @@ public class HiveDataFragmenter extends Fragmenter {
             // API call to Hive Metastore, will return a List of all the
             // partitions for this table, that matches the partition filters
             // Defined in filterStringForHive.
-            partitions = client.listPartitionsByFilter(tblDesc.getDbName(),
-                    tblDesc.getTableName(), filterStringForHive, ALL_PARTS);
+            partitions = client.listPartitionsByFilter(tblDesc.getPath(),
+                    tblDesc.getName(), filterStringForHive, ALL_PARTS);
 
             // No matched partitions for the filter, no fragments to return.
             if (partitions == null || partitions.isEmpty()) {
 
                 if (LOG.isDebugEnabled()) {
-                    LOG.debug("Table -  " + tblDesc.getDbName() + "."
-                            + tblDesc.getTableName()
+                    LOG.debug("Item -  " + tblDesc.getPath() + "."
+                            + tblDesc.getName()
                             + " Has no matched partitions for the filter : "
                             + filterStringForHive);
                 }
@@ -226,16 +226,16 @@ public class HiveDataFragmenter extends Fragmenter {
             }
 
             if (LOG.isDebugEnabled()) {
-                LOG.debug("Table -  " + tblDesc.getDbName() + "."
-                        + tblDesc.getTableName()
+                LOG.debug("Item -  " + tblDesc.getPath() + "."
+                        + tblDesc.getName()
                         + " Matched partitions list size: " + partitions.size());
             }
 
         } else {
             // API call to Hive Metastore, will return a List of all the
             // partitions for this table (no filtering)
-            partitions = client.listPartitions(tblDesc.getDbName(),
-                    tblDesc.getTableName(), ALL_PARTS);
+            partitions = client.listPartitions(tblDesc.getPath(),
+                    tblDesc.getName(), ALL_PARTS);
         }
 
         StorageDescriptor descTable = tbl.getSd();
@@ -251,11 +251,11 @@ public class HiveDataFragmenter extends Fragmenter {
                 StorageDescriptor descPartition = partition.getSd();
                 props = MetaStoreUtils.getSchema(descPartition, descTable,
                         null, // Map<string, string> parameters - can be empty
-                        tblDesc.getDbName(), tblDesc.getTableName(), // table
+                        tblDesc.getPath(), tblDesc.getName(), // table
                                                                      // name
                         partitionKeys);
                 fetchMetaDataForPartitionedTable(descPartition, props,
-                        partition, partitionKeys, tblDesc.getTableName());
+                        partition, partitionKeys, tblDesc.getName());
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
index f1aab21..20014c2 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -49,23 +50,29 @@ public class HiveMetadataFetcher extends MetadataFetcher {
     }
 
     @Override
-    public Metadata getTableMetadata(String tableName) throws Exception {
+    public List<Metadata> getMetadata(String itemName) throws Exception {
 
-        Metadata.Table tblDesc = HiveUtilities.parseTableQualifiedName(tableName);
-        Metadata metadata = new Metadata(tblDesc);
+        List<Metadata.Item> tblsDesc = HiveUtilities.extractTablesFromPattern(client,
itemName);
+        List<Metadata> metadataList = new ArrayList<Metadata>();
 
-        Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
-
-        getSchema(tbl, metadata);
+        for(Metadata.Item tblDesc: tblsDesc) {
+            Metadata metadata = new Metadata(tblDesc);
+            Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
+            getSchema(tbl, metadata);
+            metadataList.add(metadata);
+        }
 
-        return metadata;
+        return metadataList;
     }
 
 
     /**
-     * Populates the given metadata object with the given table's fields and partitions,
+     * Populates the given metadata item with the given table's fields and partitions,
      * The partition fields are added at the end of the table schema.
      * Throws an exception if the table contains unsupported field types.
+     * Supported HCatalog types: TINYINT,
+     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP,
+     * DATE, DECIMAL, VARCHAR, CHAR.
      *
      * @param tbl Hive table
      * @param metadata schema of given table
@@ -91,7 +98,7 @@ public class HiveMetadataFetcher extends MetadataFetcher {
                 metadata.addField(HiveUtilities.mapHiveType(hivePart));
             }
         } catch (UnsupportedTypeException e) {
-            String errorMsg = "Failed to retrieve metadata for table " + metadata.getTable()
+ ". " +
+            String errorMsg = "Failed to retrieve metadata for table " + metadata.getItem()
+ ". " +
                     e.getMessage();
             throw new UnsupportedTypeException(errorMsg);
         }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 651e0fe..3a3cf0f 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
  */
 
 
+import java.util.List;
 import java.util.ArrayList;
 
 import org.apache.commons.lang.StringUtils;
@@ -42,6 +43,7 @@ import org.apache.hawq.pxf.api.UnsupportedTypeException;
 public class HiveUtilities {
 
     private static final Log LOG = LogFactory.getLog(HiveUtilities.class);
+    private static final String WILDCARD = "*";
 
     /**
      * Default Hive DB (schema) name.
@@ -64,13 +66,13 @@ public class HiveUtilities {
         return client;
     }
 
-    public static Table getHiveTable(HiveMetaStoreClient client, Metadata.Table tableName)
+    public static Table getHiveTable(HiveMetaStoreClient client, Metadata.Item itemName)
             throws Exception {
-        Table tbl = client.getTable(tableName.getDbName(), tableName.getTableName());
+        Table tbl = client.getTable(itemName.getPath(), itemName.getName());
         String tblType = tbl.getTableType();
 
         if (LOG.isDebugEnabled()) {
-            LOG.debug("Table: " + tableName.getDbName() + "." + tableName.getTableName()
+ ", type: " + tblType);
+            LOG.debug("Item: " + itemName.getPath() + "." + itemName.getName() + ", type:
" + tblType);
         }
 
         if (TableType.valueOf(tblType) == TableType.VIRTUAL_VIEW) {
@@ -203,19 +205,40 @@ public class HiveUtilities {
      * It can be either <code>table_name</code> or <code>db_name.table_name</code>.
      *
      * @param qualifiedName Hive table name
-     * @return {@link org.apache.hawq.pxf.api.Metadata.Table} object holding the full table
name
+     * @return {@link Metadata.Item} object holding the full table name
      */
-    public static Metadata.Table parseTableQualifiedName(String qualifiedName) {
+    public static Metadata.Item extractTableFromName(String qualifiedName) {
+        List<Metadata.Item> items = extractTablesFromPattern(null, qualifiedName);
+        if(items.isEmpty()) {
+            return null;
+        }
+        return items.get(0);
+    }
+
+    /**
+     * Extracts the db_name and table_name from the qualifiedName.
+     * qualifiedName is the Hive table name or pattern that the user enters in the CREATE
EXTERNAL TABLE statement
+     * or when querying HCatalog table.
+     * It can be either <code>table_name_pattern</code> or <code>db_name_pattern.table_name_pattern</code>.
+     *
+     * @param pattern Hive table name or pattern
+     * @return {@link Metadata.Item} object holding the full table name
+     */
+    public static List<Metadata.Item> extractTablesFromPattern(HiveMetaStoreClient
client, String pattern) {
+
+
+        String dbPattern, tablePattern;
+
+        System.out.println(pattern);
 
-        String dbName, tableName;
         String errorMsg = " is not a valid Hive table name. "
                 + "Should be either <table_name> or <db_name.table_name>";
 
-        if (StringUtils.isBlank(qualifiedName)) {
+        if (StringUtils.isBlank(pattern)) {
             throw new IllegalArgumentException("empty string" + errorMsg);
         }
 
-        String[] rawToks = qualifiedName.split("[.]");
+        String[] rawToks = pattern.split("[.]");
         ArrayList<String> toks = new ArrayList<String>();
         for (String tok: rawToks) {
             if (StringUtils.isBlank(tok)) {
@@ -225,15 +248,53 @@ public class HiveUtilities {
         }
 
         if (toks.size() == 1) {
-            dbName = HIVE_DEFAULT_DBNAME;
-            tableName = toks.get(0);
+            dbPattern = HIVE_DEFAULT_DBNAME;
+            tablePattern = toks.get(0);
         } else if (toks.size() == 2) {
-            dbName = toks.get(0);
-            tableName = toks.get(1);
+            dbPattern = toks.get(0);
+            tablePattern = toks.get(1);
         } else {
-            throw new IllegalArgumentException("\"" + qualifiedName + "\"" + errorMsg);
+            throw new IllegalArgumentException("\"" + pattern + "\"" + errorMsg);
+        }
+
+        return getTablesFromPattern(client, dbPattern, tablePattern);
+
+    }
+
+    private static List<Metadata.Item> getTablesFromPattern(HiveMetaStoreClient client,
String dbPattern, String tablePattern) {
+
+        List<String> databases = null;
+        List<Metadata.Item> itemList = new ArrayList<Metadata.Item>();
+        List<String> tables = new ArrayList<String>();
+
+        if(client == null || (!dbPattern.contains(WILDCARD) && !tablePattern.contains(WILDCARD))
) {
+            /* This case occurs when the call is invoked as part of the fragmenter api or
when metadata is requested for a specific table name */
+            itemList.add(new Metadata.Item(dbPattern, tablePattern));
+            return itemList;
         }
 
-        return new Metadata.Table(dbName, tableName);
+        try {
+            /*if(dbPattern.contains(WILDCARD)) {
+                databases.addAll(client.getAllDatabases());
+            }*/
+            databases = client.getDatabases(dbPattern);
+            if(databases.isEmpty()) {
+                throw new IllegalArgumentException("no database found for the given pattern");
+            }
+            for(String dbName: databases) {
+                for(String tableName: client.getTables(dbName, tablePattern)) {
+                    if (!tableName.isEmpty()) {
+                        itemList.add(new Metadata.Item(dbName, tableName));
+                    }
+                }
+            }
+            if(itemList.isEmpty()) {
+                throw new IllegalArgumentException("no tables found");
+            }
+            return itemList;
+
+        } catch (MetaException cause) {
+            throw new RuntimeException("Failed connecting to Hive MetaStore service: " +
cause.getMessage(), cause);
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
index c300214..2cc8b38 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
@@ -55,7 +55,7 @@ public class HiveMetadataFetcherTest {
     HiveConf hiveConfiguration;
     HiveMetaStoreClient hiveClient;
     HiveMetadataFetcher fetcher;
-    Metadata metadata;
+    List<Metadata> metadataList;
 
     @Before
     public void SetupCompressionFactory() {
@@ -90,7 +90,7 @@ public class HiveMetadataFetcherTest {
         String tableName = "t.r.o.u.b.l.e.m.a.k.e.r";
 
         try {
-            fetcher.getTableMetadata(tableName);
+            fetcher.getMetadata(tableName);
             fail("Expected an IllegalArgumentException");
         } catch (IllegalArgumentException ex) {
             assertEquals("\"t.r.o.u.b.l.e.m.a.k.e.r\" is not a valid Hive table name. Should
be either <table_name> or <db_name.table_name>", ex.getMessage()); 
@@ -110,7 +110,7 @@ public class HiveMetadataFetcherTest {
         when(hiveClient.getTable("default", tableName)).thenReturn(hiveTable);
 
         try {
-            metadata = fetcher.getTableMetadata(tableName);
+            metadataList = fetcher.getMetadata(tableName);
             fail("Expected an UnsupportedOperationException because PXF doesn't support views");
         } catch (UnsupportedOperationException e) {
             assertEquals("Hive views are not supported by HAWQ", e.getMessage());
@@ -137,9 +137,10 @@ public class HiveMetadataFetcherTest {
         when(hiveClient.getTable("default", tableName)).thenReturn(hiveTable);
 
         // get metadata
-        metadata = fetcher.getTableMetadata(tableName);
+        metadataList = fetcher.getMetadata(tableName);
+        Metadata metadata = metadataList.get(0);
 
-        assertEquals("default.cause", metadata.getTable().toString());
+        assertEquals("default.cause", metadata.getItem().toString());
 
         List<Metadata.Field> resultFields = metadata.getFields();
         assertNotNull(resultFields);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 03cca72..466dedb 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -31,7 +31,7 @@ import org.apache.hawq.pxf.api.UnsupportedTypeException;
 public class HiveUtilitiesTest {
 
     FieldSchema hiveColumn;
-    Metadata.Table tblDesc;
+    Metadata.Item tblDesc;
 
     static String[][] typesMappings = {
         /* hive type -> hawq type */
@@ -154,19 +154,19 @@ public class HiveUtilitiesTest {
     @Test
     public void parseTableQualifiedNameNoDbName() throws Exception {
         String name = "orphan";
-        tblDesc = HiveUtilities.parseTableQualifiedName(name);
+        tblDesc = HiveUtilities.extractTableFromName(name);
 
-        assertEquals("default", tblDesc.getDbName());
-        assertEquals(name, tblDesc.getTableName());
+        assertEquals("default", tblDesc.getPath());
+        assertEquals(name, tblDesc.getName());
     }
 
     @Test
     public void parseTableQualifiedName() throws Exception {
         String name = "not.orphan";
-        tblDesc = HiveUtilities.parseTableQualifiedName(name);
+        tblDesc = HiveUtilities.extractTableFromName(name);
 
-        assertEquals("not", tblDesc.getDbName());
-        assertEquals("orphan", tblDesc.getTableName());
+        assertEquals("not", tblDesc.getPath());
+        assertEquals("orphan", tblDesc.getName());
     }
 
     @Test
@@ -206,7 +206,7 @@ public class HiveUtilitiesTest {
 
     private void parseTableQualifiedNameNegative(String name, String errorMsg, String reason)
throws Exception {
         try {
-            tblDesc = HiveUtilities.parseTableQualifiedName(name);
+            tblDesc = HiveUtilities.extractTableFromName(name);
             fail("test should fail because of " + reason);
         } catch (IllegalArgumentException e) {
             assertEquals(errorMsg, e.getMessage());

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
index c059f59..a0c0f05 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataFetcherFactory.java
@@ -28,7 +28,14 @@ import org.apache.hawq.pxf.api.MetadataFetcher;
  * abstract class which is returned by the MetadataFetcherFactory. 
  */
 public class MetadataFetcherFactory {
-    static public MetadataFetcher create(String fetcherName) throws Exception {
-        return (MetadataFetcher) Class.forName(fetcherName).newInstance();
+    public static MetadataFetcher create(String profile) throws Exception {
+        String fetcherClass;
+        /* TODO: The metadata class will be moved to the pxf-profile.xml in the future */
+        if ( "hcatalog".equals(profile) || "hive".equals(profile) ) {
+            fetcherClass = "org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher";
+        } else {
+            throw new IllegalArgumentException("Metadata access for profile " + profile +
" not supported");
+        }
+        return (MetadataFetcher) Class.forName(fetcherClass).newInstance();
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index eb83627..cb69f94 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -21,6 +21,8 @@ package org.apache.hawq.pxf.service;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -39,15 +41,28 @@ public class MetadataResponseFormatter {
     /**
      * Converts {@link Metadata} to JSON String format.
      *
-     * @param metadata metadata to convert
+     * @param metadata metadata objects to convert
      * @return JSON formatted response
      * @throws IOException if converting the data to JSON fails
      */
     public static String formatResponseString(Metadata metadata) throws IOException {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
+        metadataList.add(metadata);
+        return formatResponseString(metadataList);
+    }
+
+    /**
+     * Converts list of {@link Metadata} to JSON String format.
+     *
+     * @param metadataList list of metadata objects to convert
+     * @return JSON formatted response
+     * @throws IOException if converting the data to JSON fails
+     */
+    public static String formatResponseString(List<Metadata> metadataList) throws IOException
{
         /* print the metadata before serialization */
-        LOG.debug(MetadataResponseFormatter.metadataToString(metadata));
+        LOG.debug(MetadataResponseFormatter.metadataToString(metadataList));
 
-        return MetadataResponseFormatter.metadataToJSON(metadata);
+        return MetadataResponseFormatter.metadataToJSON(metadataList);
     }
 
     /**
@@ -55,50 +70,65 @@ public class MetadataResponseFormatter {
      * To be used as the result string for HAWQ.
      * An example result is as follows:
      *
-     * {"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}
+     * {"PXFMetadata":[{"item":{"path":"default","name":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}
      */
-    private static String metadataToJSON(Metadata metadata) throws IOException {
+    private static String metadataToJSON(List<Metadata> metadataList) throws IOException
{
 
-        if (metadata == null) {
-            throw new IllegalArgumentException("metadata object is null - cannot serialize");
+        if (metadataList == null || metadataList.isEmpty()) {
+            throw new IllegalArgumentException("no metadata objects found - cannot serialize");
         }
 
-        if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
-            throw new IllegalArgumentException("metadata contains no fields - cannot serialize");
+        StringBuilder result = null;
+
+        for(Metadata metadata: metadataList) {
+            if(metadata == null) {
+                throw new IllegalArgumentException("metadata object is null - cannot serialize");
+            }
+            if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
+                throw new IllegalArgumentException("metadata contains no fields - cannot
serialize");
+            }
+            if (result == null) {
+                result = new StringBuilder("{\"PXFMetadata\":["); /* prefix info */
+            } else {
+                result.append(",");
+            }
+
+            ObjectMapper mapper = new ObjectMapper();
+            mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
+            result.append(mapper.writeValueAsString(metadata));
         }
 
-        ObjectMapper mapper = new ObjectMapper();
-        mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
+        return result.append("]}").toString(); /* append suffix info */
 
-        StringBuilder result = new StringBuilder("{\"PXFMetadata\":");
-        String prefix = "["; // preparation for supporting multiple tables
-        result.append(prefix).append(mapper.writeValueAsString(metadata));
-        return result.append("]}").toString();
     }
 
     /**
-     * Converts metadata to a readable string.
+     * Converts metadata list to a readable string.
      * Intended for debugging purposes only.
      */
-    private static String metadataToString(Metadata metadata) {
-        StringBuilder result = new StringBuilder("Metadata for table \"");
+    private static String metadataToString(List<Metadata> metadataList) {
+        StringBuilder result = new StringBuilder("Metadata:");
 
-        if (metadata == null) {
-            return "No metadata";
-        }
+        for(Metadata metadata: metadataList) {
+            result.append(" Metadata for item \"");
 
-        result.append(metadata.getTable()).append("\": ");
+            if (metadata == null) {
+                return "No metadata";
+            }
 
-        if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
-            result.append("no fields in table");
-            return result.toString();
-        }
+            result.append(metadata.getItem()).append("\": ");
+
+            if ((metadata.getFields() == null) || metadata.getFields().isEmpty()) {
+                result.append("no fields in item");
+                return result.toString();
+            }
 
-        int i = 0;
-        for (Metadata.Field field: metadata.getFields()) {
-            result.append("Field #").append(++i).append(": [")
-                .append("Name: ").append(field.getName())
-                .append(", Type: ").append(field.getType()).append("] ");
+            int i = 0;
+            for (Metadata.Field field : metadata.getFields()) {
+                result.append("Field #").append(++i).append(": [")
+                        .append("Name: ").append(field.getName())
+                        .append(", Type: ").append(field.getType()).append("] ");
+            }
         }
 
         return result.toString();

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
index 12a1904..a080f18 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
@@ -20,6 +20,7 @@ package org.apache.hawq.pxf.service.rest;
  */
 
 import java.io.IOException;
+import java.util.List;
 
 import javax.servlet.ServletContext;
 import javax.ws.rs.GET;
@@ -56,51 +57,44 @@ public class MetadataResource extends RestResource {
     }
 
     /**
-     * This function queries the HiveMetaStore to get the given table's
-     * metadata: Table name, field names, field types. The types are converted
-     * from HCatalog types to HAWQ types. Supported HCatalog types: TINYINT,
-     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP,
-     * DATE, DECIMAL, VARCHAR, CHAR. <br>
+     * This function queries the underlying store based on the given profile to get the
schema of the given item(s)
+     * (item name, field names, field types). The types are converted
+     * from the underlying types to HAWQ types.
      * Unsupported types result in an error. <br>
      * Response Examples:<br>
      * For a table <code>default.t1</code> with 2 fields (a int, b float) will
      * be returned as:
-     * <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
+     * <code>{"PXFMetadata":[{"item":{"path":"default","name":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
      *
      * @param servletContext servlet context
      * @param headers http headers
-     * @param table HCatalog table name
-     * @return JSON formatted response with metadata for given table
-     * @throws Exception if connection to Hcatalog failed, table didn't exist or
+     * @param profile profile name from which the metadata source is inferred
+     * @param item table/file name or pattern in the given source
+     * @return JSON formatted response with metadata for given item(s)
+     * @throws Exception if connection to the source/catalog failed, item didn't exist or
      *             its type or fields are not supported
      */
     @GET
-    @Path("getTableMetadata")
+    @Path("getMetadata")
     @Produces("application/json")
     public Response read(@Context final ServletContext servletContext,
                          @Context final HttpHeaders headers,
-                         @QueryParam("table") final String table)
+                         @QueryParam("profile") final String profile,
+                         @QueryParam("item") final String item)
             throws Exception {
-        LOG.debug("getTableMetadata started");
+        LOG.debug("getMetadata started");
         String jsonOutput;
         try {
             // 1. start MetadataFetcher
-            MetadataFetcher metadataFetcher = MetadataFetcherFactory.create("org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher");
// TODO:
-                                                                                        
                                            // nhorn
-                                                                                        
                                            // -
-                                                                                        
                                            // 09-03-15
-                                                                                        
                                            // -
-                                                                                        
                                            // pass
-                                                                                        
                                            // as
-                                                                                        
                                            // param
+            MetadataFetcher metadataFetcher = MetadataFetcherFactory.create(profile);
 
             // 2. get Metadata
-            Metadata metadata = metadataFetcher.getTableMetadata(table);
+            List<Metadata> metadata = metadataFetcher.getMetadata(item);
 
             // 3. serialize to JSON
             jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
 
-            LOG.debug("getTableMetadata output: " + jsonOutput);
+            LOG.debug("getMetadata output: " + jsonOutput);
 
         } catch (ClientAbortException e) {
             LOG.error("Remote connection closed by HAWQ", e);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
index 5b57d21..4664345 100644
--- a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
+++ b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
@@ -53,7 +53,7 @@ under the License.
     </profile>
     <profile>
         <name>HiveRC</name>
-        <description>This profile is suitable only for Hive tables stored in RC files
+        <description>This profile is suitable only for Hive tables stored in RC files
             and serialized with either the ColumnarSerDe or the LazyBinaryColumnarSerDe.
             It is much faster than the general purpose Hive profile.
             DELIMITER parameter is mandatory.
@@ -66,7 +66,7 @@ under the License.
     </profile>
     <profile>
         <name>HiveText</name>
-        <description>This profile is suitable only for Hive tables stored as Text files.
+        <description>This profile is suitable only for Hive tables stored as Text files.
             It is much faster than the general purpose Hive profile.
             DELIMITER parameter is mandatory.
         </description>

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index a47952a..bea8067 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -36,14 +36,14 @@ public class MetadataResponseFormatterTest {
     @Test
     public void formatResponseString() throws Exception {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int"));
         fields.add(new Metadata.Field("field2", "text"));
 
         result = MetadataResponseFormatter.formatResponseString(metadata);
         String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
+                + "\"item\":{\"path\":\"default\",\"name\":\"table1\"},"
                 + "\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}";
 
         assertEquals(expected, result);
@@ -52,14 +52,14 @@ public class MetadataResponseFormatterTest {
     @Test
     public void formatResponseStringWithNullModifier() throws Exception {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int", null));
         fields.add(new Metadata.Field("field2", "text", new String[] {}));
 
         result = MetadataResponseFormatter.formatResponseString(metadata);
         String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
+                + "\"item\":{\"path\":\"default\",\"name\":\"table1\"},"
                 + "\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}";
 
         assertEquals(expected, result);
@@ -68,8 +68,8 @@ public class MetadataResponseFormatterTest {
     @Test
     public void formatResponseStringWithModifiers() throws Exception {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, fields);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
         fields.add(new Metadata.Field("field1", "int"));
         fields.add(new Metadata.Field("field2", "numeric",
                 new String[] {"1349", "1789"}));
@@ -78,7 +78,7 @@ public class MetadataResponseFormatterTest {
 
         result = MetadataResponseFormatter.formatResponseString(metadata);
         String expected = "{\"PXFMetadata\":[{"
-                + "\"table\":{\"dbName\":\"default\",\"tableName\":\"table1\"},"
+                + "\"item\":{\"path\":\"default\",\"name\":\"table1\"},"
                 + "\"fields\":["
                 + "{\"name\":\"field1\",\"type\":\"int\"},"
                 + "{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},"
@@ -102,8 +102,8 @@ public class MetadataResponseFormatterTest {
 
     @Test
     public void formatResponseStringNoFields() throws Exception {
-        Metadata.Table tableName = new Metadata.Table("default", "table1");
-        Metadata metadata = new Metadata(tableName, null);
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, null);
 
         try {
             result = MetadataResponseFormatter.formatResponseString(metadata);
@@ -113,7 +113,7 @@ public class MetadataResponseFormatterTest {
         }
 
         ArrayList<Metadata.Field> fields = new ArrayList<Metadata.Field>();
-        metadata = new Metadata(tableName, fields);
+        metadata = new Metadata(itemName, fields);
 
         try {
             result = MetadataResponseFormatter.formatResponseString(metadata);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/854f1aa4/pxf/src/scripts/pxf_manual_failover.py
----------------------------------------------------------------------
diff --git a/pxf/src/scripts/pxf_manual_failover.py b/pxf/src/scripts/pxf_manual_failover.py
index a9f51e5..4668ac2 100755
--- a/pxf/src/scripts/pxf_manual_failover.py
+++ b/pxf/src/scripts/pxf_manual_failover.py
@@ -17,7 +17,7 @@
 # under the License.
 
 # pxf_manual_failover.py
-# This python script will adapt the PXF external tables to the new NameNode in case
+# This python script will adapt the PXF external tables to the new NameNode in case
 # of High Availability manual failover. 
 # The script receives as input the new namenode host and then goes over each external
 # table entry in the catalog table pg_exttable and updates the LOCATION field - 



Mime
View raw message