hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ga...@apache.org
Subject hive git commit: HIVE-15401 Import constraints into HBase metastore (Alan Gates, reviewed by Daniel Dai)
Date Mon, 12 Dec 2016 22:38:00 GMT
Repository: hive
Updated Branches:
  refs/heads/master d139667ad -> 4f2fd77b7


HIVE-15401 Import constraints into HBase metastore (Alan Gates, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4f2fd77b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4f2fd77b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4f2fd77b

Branch: refs/heads/master
Commit: 4f2fd77b78ca6a81defde03889ab4c79ce5896b5
Parents: d139667
Author: Alan Gates <gates@hortonworks.com>
Authored: Mon Dec 12 14:37:14 2016 -0800
Committer: Alan Gates <gates@hortonworks.com>
Committed: Mon Dec 12 14:37:14 2016 -0800

----------------------------------------------------------------------
 .../hive/metastore/hbase/TestHBaseImport.java   | 110 ++++++++++++++++++-
 .../apache/hadoop/hive/metastore/RawStore.java  |  12 ++
 .../hive/metastore/hbase/HBaseImport.java       |  19 ++++
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   6 +-
 .../hadoop/hive/metastore/TestObjectStore.java  |  21 ++++
 .../hive/metastore/hbase/TestHBaseStore.java    |  84 ++++++++------
 6 files changed, 208 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
index 21f851e..b1d3174 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
@@ -18,6 +18,8 @@
  */
 package org.apache.hadoop.hive.metastore.hbase;
 
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.ObjectStore;
@@ -50,6 +52,7 @@ import org.junit.rules.ExpectedException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -65,6 +68,8 @@ public class TestHBaseImport extends HBaseIntegrationTests {
   private static final String[] partVals = new String[] {"na", "emea", "latam", "apac"};
   private static final String[] funcNames = new String[] {"allfunc1", "allfunc2"};
   private static final String[] indexNames = new String[] {"allindex1", "allindex2"};
+  private static final String[] pkNames = new String[] {"allnonparttable_pk", "allparttable_pk"};
+  private static final String[] fkNames = new String[] {"", "allparttable_fk"};
 
   private static final List<Integer> masterKeySeqs = new ArrayList<Integer>();
   @Rule
@@ -345,6 +350,64 @@ public class TestHBaseImport extends HBaseIntegrationTests {
   }
 
   @Test
+  public void importTablesWithConstraints() throws Exception {
+    RawStore rdbms;
+    rdbms = new ObjectStore();
+    rdbms.setConf(conf);
+
+    String[] dbNames = new String[] {"onetabwcdb1", "onetabwcdb2"};
+    int now = (int)System.currentTimeMillis() / 1000;
+
+    setupObjectStore(rdbms, dbNames, now, true);
+
+    // Create the database so I can put the table in it.
+    store.createDatabase(
+        new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
+
+    HBaseImport importer = new HBaseImport("-d", dbNames[0]);
+    importer.setConnections(rdbms, store);
+    importer.run();
+
+    Database db = store.getDatabase(dbNames[0]);
+    Assert.assertNotNull(db);
+
+    Table table = store.getTable(db.getName(), tableNames[1]);
+    Assert.assertNotNull(table);
+
+    List<SQLPrimaryKey> pk = store.getPrimaryKeys(dbNames[0], tableNames[1]);
+    Assert.assertNotNull(pk);
+    Assert.assertEquals(1, pk.size());
+    Assert.assertEquals(dbNames[0], pk.get(0).getTable_db());
+    Assert.assertEquals(tableNames[1], pk.get(0).getTable_name());
+    Assert.assertEquals(0, pk.get(0).getKey_seq());
+    Assert.assertEquals("col1", pk.get(0).getColumn_name());
+    Assert.assertEquals(dbNames[0] + "_" + pkNames[1], pk.get(0).getPk_name());
+    Assert.assertTrue(pk.get(0).isEnable_cstr());
+    Assert.assertFalse(pk.get(0).isValidate_cstr());
+    Assert.assertTrue(pk.get(0).isRely_cstr());
+
+    List<SQLForeignKey> fk =
+        store.getForeignKeys(dbNames[0], tableNames[0], dbNames[0], tableNames[1]);
+    Assert.assertNotNull(fk);
+    Assert.assertEquals(1, fk.size());
+    Assert.assertEquals(dbNames[0], fk.get(0).getPktable_db());
+    Assert.assertEquals(tableNames[0], fk.get(0).getPktable_name());
+    Assert.assertEquals("col1", fk.get(0).getPkcolumn_name());
+    Assert.assertEquals(dbNames[0], fk.get(0).getFktable_db());
+    Assert.assertEquals(tableNames[1], fk.get(0).getFktable_name());
+    Assert.assertEquals("col1", fk.get(0).getFkcolumn_name());
+    Assert.assertEquals(0, fk.get(0).getKey_seq());
+    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
+    Assert.assertEquals(2, fk.get(0).getDelete_rule());
+    Assert.assertEquals(dbNames[0] + "_" + fkNames[1], fk.get(0).getFk_name());
+    Assert.assertTrue(pk.get(0).isEnable_cstr());
+    Assert.assertFalse(pk.get(0).isValidate_cstr());
+    Assert.assertTrue(pk.get(0).isRely_cstr());
+
+
+  }
+
+  @Test
   public void importOneTablePartitioned() throws Exception {
     RawStore rdbms;
     rdbms = new ObjectStore();
@@ -492,8 +555,23 @@ public class TestHBaseImport extends HBaseIntegrationTests {
   private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames,
                                String[] tokenIds, String[] tokens, String[] masterKeys, int now)
       throws MetaException, InvalidObjectException, NoSuchObjectException {
-    for (int i = 0; i < roles.length; i++) {
-      rdbms.addRole(roles[i], "me");
+    setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now, false);
+  }
+
+  private void setupObjectStore(RawStore rdbms, String[] dbNames, int now,
+                                boolean putConstraintsOnTables)
+      throws MetaException, InvalidObjectException, NoSuchObjectException {
+    setupObjectStore(rdbms, null, dbNames, null, null, null, now, putConstraintsOnTables);
+  }
+
+  private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames,
+                                String[] tokenIds, String[] tokens, String[] masterKeys, int now,
+                                boolean putConstraintsOnTables)
+      throws MetaException, InvalidObjectException, NoSuchObjectException {
+    if (roles != null) {
+      for (int i = 0; i < roles.length; i++) {
+        rdbms.addRole(roles[i], "me");
+      }
     }
 
     for (int i = 0; i < dbNames.length; i++) {
@@ -507,11 +585,29 @@ public class TestHBaseImport extends HBaseIntegrationTests {
           serde, null, null, emptyParameters);
       rdbms.createTable(new Table(tableNames[0], dbNames[i], "me", now, now, 0, sd, null,
           emptyParameters, null, null, null));
+      if (putConstraintsOnTables) {
+        rdbms.addPrimaryKeys(Collections.singletonList(
+            new SQLPrimaryKey(dbNames[i], tableNames[0], "col1", 0, dbNames[i] + "_" + pkNames[0],
+                true, false, true)
+        ));
+      }
 
       List<FieldSchema> partCols = new ArrayList<>();
       partCols.add(new FieldSchema("region", "string", ""));
       rdbms.createTable(new Table(tableNames[1], dbNames[i], "me", now, now, 0, sd, partCols,
           emptyParameters, null, null, null));
+      if (putConstraintsOnTables) {
+        rdbms.addPrimaryKeys(Arrays.asList(
+            new SQLPrimaryKey(dbNames[i], tableNames[1], "col1", 0, dbNames[i] + "_" + pkNames[1],
+                true, false, true)
+        ));
+        rdbms.addForeignKeys(Collections.singletonList(
+            new SQLForeignKey(dbNames[i], tableNames[0], "col1", dbNames[i], tableNames[1],
+                "col1", 0, 1, 2, dbNames[i] + "_" + fkNames[1], dbNames[i] + "_" + pkNames[0],
+                true, false, true)
+        ));
+
+      }
 
       for (int j = 0; j < partVals.length; j++) {
         StorageDescriptor psd = new StorageDescriptor(sd);
@@ -537,9 +633,13 @@ public class TestHBaseImport extends HBaseIntegrationTests {
             now, now, indexTableName, sd, emptyParameters, false));
       }
     }
-    for (int i = 0; i < tokenIds.length; i++) rdbms.addToken(tokenIds[i], tokens[i]);
-    for (int i = 0; i < masterKeys.length; i++) {
-      masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
+    if (tokenIds != null) {
+      for (int i = 0; i < tokenIds.length; i++) rdbms.addToken(tokenIds[i], tokens[i]);
+    }
+    if (masterKeys != null) {
+      for (int i = 0; i < masterKeys.length; i++) {
+        masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index a3dd4e5..6f4f031 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -672,6 +672,18 @@ public interface RawStore extends Configurable {
   public abstract List<SQLPrimaryKey> getPrimaryKeys(String db_name,
     String tbl_name) throws MetaException;
 
+  /**
+   * Get the foreign keys for a table.  All foreign keys for a particular table can be fetched by
+   * passing null for the last two arguments.
+   * @param parent_db_name Database the table referred to is in.  This can be null to match all
+   *                       databases.
+   * @param parent_tbl_name Table that is referred to.  This can be null to match all tables.
+   * @param foreign_db_name Database the table with the foreign key is in.
+   * @param foreign_tbl_name Table with the foreign key.
+   * @return List of all matching foreign key columns.  Note that if more than one foreign key
+   * matches the arguments the results here will be all mixed together into a single list.
+   * @throws MetaException if something goes wrong.
+   */
   public abstract List<SQLForeignKey> getForeignKeys(String parent_db_name,
     String parent_tbl_name, String foreign_db_name, String foreign_tbl_name)
     throws MetaException;

http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
index b005b4e..5f89769 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
@@ -27,6 +27,8 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -368,6 +370,23 @@ public class HBaseImport {
             }
             screen("Copying table " + name[0] + "." + name[1]);
             hbaseStore.get().createTable(table);
+
+            // See if the table has any constraints, and if so copy those as well
+            List<SQLPrimaryKey> pk =
+                rdbmsStore.get().getPrimaryKeys(table.getDbName(), table.getTableName());
+            if (pk != null && pk.size() > 0) {
+              LOG.debug("Found primary keys, adding them");
+              hbaseStore.get().addPrimaryKeys(pk);
+            }
+
+            // Passing null as the target table name results in all of the foreign keys being
+            // retrieved.
+            List<SQLForeignKey> fks =
+                rdbmsStore.get().getForeignKeys(null, null, table.getDbName(), table.getTableName());
+            if (fks != null && fks.size() > 0) {
+              LOG.debug("Found foreign keys, adding them");
+              hbaseStore.get().addForeignKeys(fks);
+            }
           }
         } catch (InterruptedException | MetaException | InvalidObjectException e) {
           throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 07cc0da..6593fa6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -2713,12 +2713,12 @@ public class HBaseStore implements RawStore {
     boolean commit = false;
     openTransaction();
     try {
-      List<SQLForeignKey> fks = getHBase().getForeignKeys(parent_db_name, parent_tbl_name);
+      List<SQLForeignKey> fks = getHBase().getForeignKeys(foreign_db_name, foreign_tbl_name);
       if (fks == null || fks.size() == 0) return null;
       List<SQLForeignKey> result = new ArrayList<>(fks.size());
       for (SQLForeignKey fkcol : fks) {
-        if (fkcol.getFktable_db().equals(parent_db_name) &&
-            fkcol.getFktable_name().equals(parent_tbl_name)) {
+        if ((parent_db_name == null || fkcol.getPktable_db().equals(parent_db_name)) &&
+            (parent_tbl_name == null || fkcol.getPktable_name().equals(parent_tbl_name))) {
           result.add(fkcol);
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
index 0497159..aef1149 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
@@ -19,7 +19,9 @@ package org.apache.hadoop.hive.metastore;
 
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -39,6 +41,8 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -323,6 +327,23 @@ public class TestObjectStore {
           for (Partition part : parts) {
             store.dropPartition(db, tbl, part.getValues());
           }
+          // Find any constraints and drop them
+          Set<String> constraints = new HashSet<>();
+          List<SQLPrimaryKey> pk = store.getPrimaryKeys(db, tbl);
+          if (pk != null) {
+            for (SQLPrimaryKey pkcol : pk) {
+              constraints.add(pkcol.getPk_name());
+            }
+          }
+          List<SQLForeignKey> fks = store.getForeignKeys(null, null, db, tbl);
+          if (fks != null) {
+            for (SQLForeignKey fkcol : fks) {
+              constraints.add(fkcol.getFk_name());
+            }
+          }
+          for (String constraint : constraints) {
+            store.dropConstraint(db, tbl, constraint);
+          }
           store.dropTable(db, tbl);
         }
         store.dropDatabase(db);

http://git-wip-us.apache.org/repos/asf/hive/blob/4f2fd77b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
index fb0a8e7..a34f8ac 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
@@ -1445,7 +1445,7 @@ public class TestHBaseStore {
 
     store.createTableWithConstraints(table, null, fk);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
 
     Assert.assertNotNull(fk);
     Assert.assertEquals(1, fk.size());
@@ -1524,7 +1524,7 @@ public class TestHBaseStore {
     store.createTable(table);
     store.addForeignKeys(fk);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
 
     Assert.assertNotNull(fk);
     Assert.assertEquals(2, fk.size());
@@ -1586,10 +1586,10 @@ public class TestHBaseStore {
     store.createTable(table);
     store.addForeignKeys(fk);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
 
     Assert.assertNotNull(fk);
-    Assert.assertEquals(3, fk.size());
+    Assert.assertEquals(2, fk.size());
     SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]);
     Arrays.sort(sorted, new Comparator<SQLForeignKey>() {
       @Override
@@ -1618,20 +1618,23 @@ public class TestHBaseStore {
       Assert.assertFalse(sorted[i].isValidate_cstr());
       Assert.assertTrue(sorted[i].isRely_cstr());
     }
-    Assert.assertEquals(DB, sorted[2].getPktable_db());
-    Assert.assertEquals(pkTable2, sorted[2].getPktable_name());
-    Assert.assertEquals(pkColNames2[0], sorted[2].getPkcolumn_name());
-    Assert.assertEquals(DB, sorted[2].getFktable_db());
-    Assert.assertEquals(tableName, sorted[2].getFktable_name());
-    Assert.assertEquals(fkColNames[0], sorted[2].getFkcolumn_name());
-    Assert.assertEquals(0, sorted[2].getKey_seq());
-    Assert.assertEquals(1, sorted[2].getUpdate_rule());
-    Assert.assertEquals(2, sorted[2].getDelete_rule());
-    Assert.assertEquals(fkName2, sorted[2].getFk_name());
-    Assert.assertEquals(pkName2, sorted[2].getPk_name());
-    Assert.assertTrue(sorted[2].isEnable_cstr());
-    Assert.assertFalse(sorted[2].isValidate_cstr());
-    Assert.assertTrue(sorted[2].isRely_cstr());
+    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
+    Assert.assertNotNull(fk);
+    Assert.assertEquals(1, fk.size());
+    Assert.assertEquals(DB, fk.get(0).getPktable_db());
+    Assert.assertEquals(pkTable2, fk.get(0).getPktable_name());
+    Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name());
+    Assert.assertEquals(DB, fk.get(0).getFktable_db());
+    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
+    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
+    Assert.assertEquals(0, fk.get(0).getKey_seq());
+    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
+    Assert.assertEquals(2, fk.get(0).getDelete_rule());
+    Assert.assertEquals(fkName2, fk.get(0).getFk_name());
+    Assert.assertEquals(pkName2, fk.get(0).getPk_name());
+    Assert.assertTrue(fk.get(0).isEnable_cstr());
+    Assert.assertFalse(fk.get(0).isValidate_cstr());
+    Assert.assertTrue(fk.get(0).isRely_cstr());
 
   }
 
@@ -1666,10 +1669,10 @@ public class TestHBaseStore {
     );
     store.addForeignKeys(fk);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
 
     Assert.assertNotNull(fk);
-    Assert.assertEquals(3, fk.size());
+    Assert.assertEquals(2, fk.size());
     SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]);
     Arrays.sort(sorted, new Comparator<SQLForeignKey>() {
       @Override
@@ -1698,24 +1701,33 @@ public class TestHBaseStore {
       Assert.assertFalse(sorted[i].isValidate_cstr());
       Assert.assertTrue(sorted[i].isRely_cstr());
     }
-    Assert.assertEquals(DB, sorted[2].getPktable_db());
-    Assert.assertEquals(pkTable2, sorted[2].getPktable_name());
-    Assert.assertEquals(pkColNames2[0], sorted[2].getPkcolumn_name());
-    Assert.assertEquals(DB, sorted[2].getFktable_db());
-    Assert.assertEquals(tableName, sorted[2].getFktable_name());
-    Assert.assertEquals(fkColNames[0], sorted[2].getFkcolumn_name());
-    Assert.assertEquals(0, sorted[2].getKey_seq());
-    Assert.assertEquals(1, sorted[2].getUpdate_rule());
-    Assert.assertEquals(2, sorted[2].getDelete_rule());
-    Assert.assertEquals(fkName2, sorted[2].getFk_name());
-    Assert.assertEquals(pkName2, sorted[2].getPk_name());
-    Assert.assertTrue(sorted[2].isEnable_cstr());
-    Assert.assertFalse(sorted[2].isValidate_cstr());
-    Assert.assertTrue(sorted[2].isRely_cstr());
+
+    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
+    Assert.assertNotNull(fk);
+    Assert.assertEquals(1, fk.size());
+    Assert.assertEquals(DB, fk.get(0).getPktable_db());
+    Assert.assertEquals(pkTable2, fk.get(0).getPktable_name());
+    Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name());
+    Assert.assertEquals(DB, fk.get(0).getFktable_db());
+    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
+    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
+    Assert.assertEquals(0, fk.get(0).getKey_seq());
+    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
+    Assert.assertEquals(2, fk.get(0).getDelete_rule());
+    Assert.assertEquals(fkName2, fk.get(0).getFk_name());
+    Assert.assertEquals(pkName2, fk.get(0).getPk_name());
+    Assert.assertTrue(fk.get(0).isEnable_cstr());
+    Assert.assertFalse(fk.get(0).isValidate_cstr());
+    Assert.assertTrue(fk.get(0).isRely_cstr());
+
+    // Check that passing null gets all the foreign keys
+    fk = store.getForeignKeys(null, null, DB, tableName);
+    Assert.assertNotNull(fk);
+    Assert.assertEquals(3, fk.size());
 
     store.dropConstraint(DB, tableName, fkName);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
     Assert.assertNotNull(fk);
     Assert.assertEquals(1, fk.size());
     Assert.assertEquals(DB, fk.get(0).getPktable_db());
@@ -1735,7 +1747,7 @@ public class TestHBaseStore {
 
     store.dropConstraint(DB, tableName, fkName2);
 
-    fk = store.getForeignKeys(DB, tableName, DB, pkTable);
+    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
     Assert.assertNull(fk);
   }
 


Mime
View raw message