hawq-commits mailing list archives

From: iw...@apache.org
Subject: [1/3] incubator-hawq git commit: HAWQ-969. Modify some functions in hdfs_config.cpp and yarn_config.cpp
Date: Wed, 24 Aug 2016 01:51:57 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 8cc4a042e -> 56b704343


HAWQ-969. Modify some functions in hdfs_config.cpp and yarn_config.cpp


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/56b70434
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/56b70434
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/56b70434

Branch: refs/heads/master
Commit: 56b704343ebc67bcdd5594134dea731e81042b31
Parents: 6617a83
Author: Chunling Wang <wangchunling14@126.com>
Authored: Sat Aug 20 14:10:41 2016 +0800
Committer: ivan <iweng@pivotal.io>
Committed: Wed Aug 24 09:50:40 2016 +0800

----------------------------------------------------------------------
 src/test/feature/lib/hdfs_config.cpp  | 206 +++++++++++++++--------------
 src/test/feature/lib/hdfs_config.h    |  37 +++---
 src/test/feature/lib/xml_parser.cpp   |   6 +-
 src/test/feature/lib/xml_parser.h     |   2 +-
 src/test/feature/lib/yarn_config.cpp  | 169 ++++++++++++-----------
 src/test/feature/lib/yarn_config.h    |  27 ++--
 src/test/feature/testlib/test_lib.cpp | 124 ++++++++---------
 7 files changed, 300 insertions(+), 271 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/hdfs_config.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/hdfs_config.cpp b/src/test/feature/lib/hdfs_config.cpp
index e59a02e..ee72a17 100644
--- a/src/test/feature/lib/hdfs_config.cpp
+++ b/src/test/feature/lib/hdfs_config.cpp
@@ -14,6 +14,58 @@ using std::string;
 namespace hawq {
 namespace test {
 
+void HdfsConfig::runCommand(const string &command, 
+                            bool ishdfsuser, 
+                            string &result) {
+  string cmd = "";
+  if (ishdfsuser) {
+    cmd = "sudo -u ";
+    cmd.append(getHdfsUser());
+    cmd.append(" ");
+    cmd.append(command);
+  } else {
+    cmd = command;
+  }
+  Command c(cmd);
+  result = c.run().getResultOutput();
+}
+
+bool HdfsConfig::runCommandAndFind(const string &command, 
+                                   bool ishdfsuser, 
+                                   const string &findstring) {
+  string result = "";
+  runCommand(command, ishdfsuser, result);
+  auto lines = hawq::test::split(result, '\n');
+  for (size_t i=0; i<lines.size(); i++) {
+    string valueLine = lines[i];
+    int find = valueLine.find(findstring);
+    if (find >= 0) {
+        return true;
+    }
+  }
+  return false;
+}
+
+void HdfsConfig::runCommandAndGetNodesPorts(const string &command, 
+                                            std::vector<string> &datanodelist,
+                                            std::vector<int> &port) {
+  string result = "";
+  runCommand(command, true, result);
+  auto lines = hawq::test::split(result, '\n');
+  for (size_t i = 0; i < lines.size(); i++) {
+    string valueLine = lines[i];
+    auto datanodeInfo = hawq::test::split(valueLine, ':');
+    if (datanodeInfo.size() == 3) {
+      int portStart = datanodeInfo[2].find_first_of('(');
+      int portEnd = datanodeInfo[2].find_first_of(')');
+      string datanodePort = datanodeInfo[2].substr(0, portStart);
+      string datanodeHost = datanodeInfo[2].substr(portStart+1, portEnd-portStart-1);
+      datanodelist.push_back(hawq::test::trim(datanodeHost));
+      port.push_back(std::stoi(hawq::test::trim(datanodePort)));
+    }
+  }
+}
+
 string HdfsConfig::getHdfsUser() {
   string cmd = "ps aux|grep hdfs.server|grep -v grep";
   Command c(cmd);
@@ -26,6 +78,9 @@ string HdfsConfig::getHdfsUser() {
 }
 
 bool HdfsConfig::LoadFromHawqConfigFile() {
+  if (isLoadFromHawqConfigFile) {
+    return true;
+  }
   const char *env = getenv("GPHOME");
   string confPath = env ? env : "";
   if (confPath != "") {
@@ -35,22 +90,31 @@ bool HdfsConfig::LoadFromHawqConfigFile() {
   }
 
   hawqxmlconf.reset(new XmlConfig(confPath));
-  hawqxmlconf->parse();
+  if (!hawqxmlconf->parse())
+    return false;
+
+  isLoadFromHawqConfigFile = true;
   return true;
 }
 
 bool HdfsConfig::LoadFromHdfsConfigFile() {
+  if (isLoadFromHdfsConfigFile) {
+    return true;
+  }
   string confPath=getHadoopHome();
   if (confPath == "") {
     return false;
   }
   confPath.append("/etc/hadoop/hdfs-site.xml");
   hdfsxmlconf.reset(new XmlConfig(confPath));
-  hdfsxmlconf->parse();
+  if (!hdfsxmlconf->parse())
+    return false;
+  
+  isLoadFromHdfsConfigFile = true;
   return true;
 }
 
-bool HdfsConfig::isHA() {
+int HdfsConfig::isHA() {
   const hawq::test::PSQLQueryResult &result = psql.getQueryResult(
        "SELECT substring(fselocation from length('hdfs:// ') for (position('/' in substring(fselocation
from length('hdfs:// ')))-1)::int) "
        "FROM pg_filespace pgfs, pg_filespace_entry pgfse "
@@ -59,53 +123,46 @@ bool HdfsConfig::isHA() {
   if (table.size() > 0) {
     int find = table[0][0].find(":");
     if (find < 0) {
-      return true;
+      return 1;
     } else {
-      return false;
+      return 0;
     }
   }
-  return false;
+  return -1;
 }
 
-bool HdfsConfig::isConfigKerberos() {
+int HdfsConfig::isConfigKerberos() {
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHawqHomeException();
+    return -1;
   }
   string authentication = hawqxmlconf->getString("hadoop.security.authentication");
   if (authentication == "kerberos") {
-    return true;
+    return 1;
   } else {
-    return false;
+    return 0;
   }
 }
 
-bool HdfsConfig::isTruncate() {
-  string cmd = "hadoop fs -truncate";
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  if (lines.size() >= 1) {
-    string valueLine = lines[0];
-    int find = valueLine.find("-truncate: Unknown command");
-    if (find < 0) {
-      return true;
-    }
+int HdfsConfig::isTruncate() {
+  if (runCommandAndFind("hadoop fs -truncate", false, "-truncate: Unknown command")) {
+    return 0;
+  } else {
+    return 1;
   }
-  return false;
 }
 
 string HdfsConfig::getHadoopHome() {
-  string cmd = "ps -ef|grep hadoop";
-  Command c(cmd);
-  string result = c.run().getResultOutput();
+  string result = "";
+  runCommand("ps -ef|grep hadoop", true, result);
   string hadoopHome = "";
   auto lines = hawq::test::split(result, '\n');
   for (size_t i=0; i<lines.size()-1; i++) {
     string valueLine = lines[i];
-    int pos = valueLine.find("-Dhadoop.home.dir=");
+    string findstring = "-Dhadoop.home.dir=";
+    int pos = valueLine.find(findstring);
     if (pos >=0 ) {
-      string valueTmp = valueLine.substr(pos+18); 
+      string valueTmp = valueLine.substr(pos+findstring.size()); 
       int valueEnd = valueTmp.find_first_of(" ");
       string value = valueTmp.substr(0, valueEnd);
       hadoopHome = hawq::test::trim(value);
@@ -139,19 +196,11 @@ bool HdfsConfig::getHANamenode(const string &namenodetype,
   auto haNamenodes = hawq::test::split(haNamenodesValue, ',');
   for (size_t i = 0; i < haNamenodes.size(); i++) {
     string haNamenode = hawq::test::trim(haNamenodes[i]);
-    string cmd = "sudo -u ";
-    cmd.append(getHdfsUser());
-    cmd.append(" hdfs haadmin -getServiceState ");
+    string cmd = "hdfs haadmin -getServiceState ";
     cmd.append(haNamenode);
-    Command c(cmd);
-    string result = c.run().getResultOutput();
-    auto lines = hawq::test::split(result, '\n');
-    if (lines.size() >= 1) {
-      string valueLine = lines[0];
-      if (valueLine == namenodetype) {
-        namenodeService = haNamenode;
-        break;
-      }
+    if (runCommandAndFind(cmd, true, namenodetype)) {
+      namenodeService = haNamenode;
+      break;
     }
   }
   string rpcAddressName = "dfs.namenode.rpc-address.gphd-cluster.";
@@ -165,11 +214,8 @@ bool HdfsConfig::getHANamenode(const string &namenodetype,
 
 void HdfsConfig::getNamenodes(std::vector<string> &namenodes,
                               std::vector<int> &port) {
-  string cmd = "sudo -u ";
-  cmd.append(getHdfsUser());
-  cmd.append(" hdfs getconf -nnRpcAddresses");
-  Command c(cmd);
-  string result = c.run().getResultOutput();
+  string result = "";
+  runCommand("hdfs getconf -nnRpcAddresses", true, result);
   auto lines = hawq::test::split(result, '\n');
   for (size_t i = 0; i < lines.size(); i++) {
     string valueLine = lines[i];
@@ -183,71 +229,28 @@ void HdfsConfig::getNamenodes(std::vector<string> &namenodes,
 
 void HdfsConfig::getDatanodelist(std::vector<string> &datanodelist,
                                  std::vector<int> &port) {
-  string cmd = "sudo -u ";
-  cmd.append(getHdfsUser());
-  cmd.append(" hdfs dfsadmin -report | grep Name");
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  for (size_t i = 0; i < lines.size(); i++) {
-    string valueLine = lines[i];
-    auto datanodeInfo = hawq::test::split(valueLine, ':');
-    if (datanodeInfo.size() == 3) {
-      int portStart = datanodeInfo[2].find_first_of('(');
-      int portEnd = datanodeInfo[2].find_first_of(')');
-      string datanodePort = datanodeInfo[2].substr(0, portStart);
-      string datanodeHost = datanodeInfo[2].substr(portStart+1, portEnd-portStart-1);
-      datanodelist.push_back(hawq::test::trim(datanodeHost));
-      port.push_back(std::stoi(hawq::test::trim(datanodePort)));
-    }
-  }
+  runCommandAndGetNodesPorts("hdfs dfsadmin -report | grep Name", datanodelist, port);
 }
 
 void HdfsConfig::getActiveDatanodes(std::vector<string> &activedatanodes,
                                     std::vector<int> &port) {
-  string cmd = "sudo -u ";
-  cmd.append(getHdfsUser());
-  cmd.append(" hdfs dfsadmin -report -live | grep Name");
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  for (size_t i = 0; i < lines.size(); i++) {
-    string valueLine = lines[i];
-    auto datanodeInfo = hawq::test::split(valueLine, ':');
-    if (datanodeInfo.size() == 3) {
-      int portStart = datanodeInfo[2].find_first_of('(');
-      int portEnd = datanodeInfo[2].find_first_of(')');
-      string datanodePort = datanodeInfo[2].substr(0, portStart);
-      string datanodeHost = datanodeInfo[2].substr(portStart+1, portEnd-portStart-1);
-      activedatanodes.push_back(hawq::test::trim(datanodeHost));
-      port.push_back(std::stoi(hawq::test::trim(datanodePort)));
-    }
-  }
+  runCommandAndGetNodesPorts("hdfs dfsadmin -report -live | grep Name", activedatanodes, port);
 }
 
-
-bool HdfsConfig::isSafemode() {
-  string cmd = "hadoop fs -mkdir /tmp_hawq_test";
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  if (lines.size() >= 1) {
-    string valueLine = lines[0];
-    int find = valueLine.find("Name node is in safe mode.");
-    if (find >= 0) {
-      return true;
-    }
+int HdfsConfig::isSafemode() {
+  if (runCommandAndFind("hadoop fs -mkdir /tmp_hawq_test", false, "Name node is in safe mode.")) {
+    return 1;
   }
-  cmd = "hadoop fs -rm -r /tmp_hawq_test";
+  string cmd = "hadoop fs -rm -r /tmp_hawq_test";
   Command c_teardown(cmd);
-  result = c_teardown.run().getResultOutput();
-  return false;
+  string result = c_teardown.run().getResultOutput();
+  return 0;
 }
 
 string HdfsConfig::getParameterValue(const string &parameterName) {
   bool ret = LoadFromHdfsConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return "Error: failed to load from HDFS configuration file";
   }
 
   return hdfsxmlconf->getString(parameterName);
@@ -255,11 +258,12 @@ string HdfsConfig::getParameterValue(const string &parameterName) {
 
 string HdfsConfig::getParameterValue(const string &parameterName,
                                      const string &conftype) {
-  if (conftype == "hdfs" || conftype == "HDFS")
+  if (hawq::test::lower(conftype) == "hdfs") {
     return getParameterValue(parameterName);
+  }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHawqHomeException();
+    return "Error: failed to load from HAWQ configuration file";
   }
 
   return hawqxmlconf->getString(parameterName);
@@ -269,7 +273,7 @@ bool HdfsConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue) { 
   bool ret = LoadFromHdfsConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return false;
   }
 
   return hdfsxmlconf->setString(parameterName, parameterValue);
@@ -278,12 +282,12 @@ bool HdfsConfig::setParameterValue(const string &parameterName,
 bool HdfsConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue,
                                    const string &conftype) {
-  if (conftype == "hdfs" || conftype == "HDFS") {
+  if (hawq::test::lower(conftype) == "hdfs") {
     return setParameterValue(parameterName, parameterValue);
   }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHawqHomeException();
+    return false;
   }
 
   return hawqxmlconf->setString(parameterName, parameterValue);

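For reference, the consolidated runCommandAndGetNodesPorts() above expects `hdfs dfsadmin -report | grep Name` rows of the shape `Name: <ip>:<port> (<hostname>)`. A minimal standalone sketch of that per-row parse; the sample line is illustrative, not captured from a real cluster:

// Sketch: parsing one "Name:" row from "hdfs dfsadmin -report | grep Name".
#include <iostream>
#include <string>

int main() {
  // split("Name: 192.168.1.10:50010 (dn1.example.com)", ':') yields three
  // fields; the third, "50010 (dn1.example.com)", carries port and host.
  std::string field = "50010 (dn1.example.com)";
  int portStart = field.find_first_of('(');
  int portEnd = field.find_first_of(')');
  std::string datanodePort = field.substr(0, portStart);                           // "50010 "
  std::string datanodeHost = field.substr(portStart + 1, portEnd - portStart - 1); // "dn1.example.com"
  std::cout << datanodeHost << ":" << datanodePort << std::endl;
  return 0;
}
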
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/hdfs_config.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/hdfs_config.h b/src/test/feature/lib/hdfs_config.h
index 34e6f56..2b68db1 100644
--- a/src/test/feature/lib/hdfs_config.h
+++ b/src/test/feature/lib/hdfs_config.h
@@ -21,7 +21,10 @@ class HdfsConfig {
     /**
      * HdfsConfig constructor
      */
-    HdfsConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {}
+    HdfsConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {
+      isLoadFromHawqConfigFile = false;
+      isLoadFromHdfsConfigFile = false;
+    }
 
     /**
      * HdfsConfig destructor
@@ -30,21 +33,21 @@ class HdfsConfig {
 
     /**
      * whether HDFS is in HA mode
-     * @return true if HDFS is HA
+     * @return 1 if HDFS is HA, 0 if HDFS is not HA, -1 if there is an error
      */
-    bool isHA();
+    int isHA();
 
     /**
      * whether HDFS is kerberos
-     * @return true if HDFS is kerberos
+     * @return 1 if HDFS is configured with kerberos, 0 if HDFS is not configured with kerberos, -1 if failed to load from HAWQ configuration file
      */
-    bool isConfigKerberos();
+    int isConfigKerberos();
 
     /**
      * whether HDFS supports truncate operation
-     * @return true if HDFS supports truncate operation
+     * @return 1 if HDFS supports truncate operation, 0 if HDFS does not support truncate operation
      */
-    bool isTruncate();
+    int isTruncate();
 
     /**
      * get HADOOP working directory
@@ -96,9 +99,9 @@ class HdfsConfig {
 
     /**
      * whether HDFS is in safe mode
-     * @return true if HDFS is in safe node
+     * @return 1 if HDFS is in safe mode, 0 if HDFS is not in safe mode
      */
-    bool isSafemode();
+    int isSafemode();
 
     /**
      * get parameter value in ./etc/hdfs-client.xml or ./etc/hadoop/hdfs-site.xml according to parameter name
@@ -133,10 +136,17 @@ class HdfsConfig {
     bool setParameterValue(const std::string &parameterName, const std::string &parameterValue);
 
   private:
+    void runCommand(const std::string &command, bool ishdfsuser, std::string &result);
+    
+    bool runCommandAndFind(const std::string &command, bool ishdfsuser, const std::string &findstring);
+    
+    void runCommandAndGetNodesPorts(const std::string &command, std::vector<std::string> &datanodelist, std::vector<int> &port);
+    
     /**
      * @return hdfs user
      */
     std::string getHdfsUser();
+
     /**
      * load key-value parameters in ./etc/hdfs-client.xml
      * @return true if succeeded
@@ -156,19 +166,16 @@ class HdfsConfig {
      * @param port, namenode port reference which will be set
      * @return true if getHANamenode succeeded
      */
-    bool getHANamenode(const std::string &namenodetype,
-                       std::string &namenode,
-                                   int &port);
+    bool getHANamenode(const std::string &namenodetype, std::string &namenode, int &port);
 
   private:
     std::unique_ptr<XmlConfig> hawqxmlconf;
     std::unique_ptr<XmlConfig> hdfsxmlconf;
+    bool isLoadFromHawqConfigFile;
+    bool isLoadFromHdfsConfigFile;
     hawq::test::PSQL psql;
 };
 
-class GetHawqHomeException {};
-class GetHadoopHomeException {};
-
 } // namespace test
 } // namespace hawq
 

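With isHA(), isConfigKerberos(), isTruncate(), and isSafemode() changed from bool to int, callers can now tell a negative answer apart from a failed probe. A hedged caller sketch (not part of this commit), assuming only the HdfsConfig declaration above:

#include "hdfs_config.h"

bool checkHdfsHA() {
  hawq::test::HdfsConfig hc;
  int ha = hc.isHA();
  if (ha < 0) {
    return false;  // -1: the probe itself failed (query error), not "no HA"
  }
  return ha == 1;  // 1: HA deployment; 0: single namenode
}
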
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/xml_parser.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/xml_parser.cpp b/src/test/feature/lib/xml_parser.cpp
index c731ca8..e9619e6 100644
--- a/src/test/feature/lib/xml_parser.cpp
+++ b/src/test/feature/lib/xml_parser.cpp
@@ -36,18 +36,20 @@ void XmlConfig::closeAndSave() {
     xmlFreeDoc(doc);
 }
 
-void XmlConfig::parse() {
+bool XmlConfig::parse() {
   LIBXML_TEST_VERSION
   kv.clear();
 
   if (!open()) {
-    return;
+    return false;
   }
   try {
     readConfigItems(doc);
     closeNotSave();
+    return true;
   } catch (...) {
     closeNotSave();
+    return false;
   }
 }
 

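Since parse() now reports success instead of returning void, callers can bail out before reading keys. A minimal usage sketch; the path and key are illustrative, and XmlConfig is assumed visible as declared in xml_parser.h above:

#include <string>
#include "xml_parser.h"

// Returns the value of `key`, or "" when the file cannot be parsed.
std::string readSiteValue(const std::string &path, const std::string &key) {
  XmlConfig conf(path);
  if (!conf.parse()) {  // open() failed or readConfigItems() threw
    return "";          // caller sees an empty value instead of a crash
  }
  return conf.getString(key);
}
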
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/xml_parser.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/xml_parser.h b/src/test/feature/lib/xml_parser.h
index e2a83a7..fa1a4b6 100644
--- a/src/test/feature/lib/xml_parser.h
+++ b/src/test/feature/lib/xml_parser.h
@@ -27,7 +27,7 @@ class XmlConfig {
   void closeAndSave();
 
   // parse the configuration file
-  void parse();
+  bool parse();
 
   // @param key The key of the configuration item
   // @param value The updated value

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/yarn_config.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/yarn_config.cpp b/src/test/feature/lib/yarn_config.cpp
index 5582c0a..0587362 100644
--- a/src/test/feature/lib/yarn_config.cpp
+++ b/src/test/feature/lib/yarn_config.cpp
@@ -14,10 +14,62 @@ using std::string;
 namespace hawq {
 namespace test {
 
-string YarnConfig::getYarnUser() {
-  string cmd = "ps aux|grep yarn.server|grep -v grep";
+void YarnConfig::runCommand(const string &command, 
+                            bool isyarnuser, 
+                            string &result) {
+  string cmd = "";
+  if (isyarnuser) {
+    cmd = "sudo -u ";
+    cmd.append(getYarnUser());
+    cmd.append(" ");
+    cmd.append(command);
+  } else {
+    cmd = command;
+  }
   Command c(cmd);
-  string result = c.run().getResultOutput();
+  result = c.run().getResultOutput();
+}
+
+bool YarnConfig::runCommandAndFind(const string &command, 
+                                   bool isyarnuser, 
+                                   const string &findstring) {
+  string result = "";
+  runCommand(command, isyarnuser, result);
+  auto lines = hawq::test::split(result, '\n');
+  for (size_t i=0; i<lines.size(); i++) {
+    string valueLine = lines[i];
+    int find = valueLine.find(findstring);
+    if (find >= 0) {
+        return true;
+    }
+  }
+  return false;
+}
+
+void YarnConfig::runCommandAndGetNodesPorts(const string &command,
+                                            bool isyarnuser,
+                                            std::vector<string> &nodemanagers,
+                                            std::vector<int> &port) {
+  string result = "";
+  runCommand(command, isyarnuser, result);
+  auto lines = hawq::test::split(result, '\n');
+  bool begin = false;
+  for (size_t i=0; i<lines.size(); i++) {
+    if (!begin) {
+      if (lines[i].find("Node-Id") != string::npos) {
+        begin = true;
+      }
+    } else {
+      string values = hawq::test::split(lines[i], '\t')[0];
+      nodemanagers.push_back(hawq::test::trim(hawq::test::split(values, ':')[0]));
+      port.push_back(std::stoi(hawq::test::trim(hawq::test::split(values, ':')[1])));
+    }
+  }
+}
+
+string YarnConfig::getYarnUser() {
+  string result = "";
+  runCommand("ps aux|grep yarn.server|grep -v grep", false, result);
   auto lines = hawq::test::split(result, '\n');
   if (lines.size() >= 1) {
     return hawq::test::trim(hawq::test::split(lines[lines.size()-1], ' ')[0]);
@@ -26,6 +78,9 @@ string YarnConfig::getYarnUser() {
 }
 
 bool YarnConfig::LoadFromHawqConfigFile() {
+  if (isLoadFromHawqConfigFile) {
+    return true;
+  }
   const char *env = getenv("GPHOME");
   string confPath = env ? env : "";
   if (confPath != "") {
@@ -35,34 +90,39 @@ bool YarnConfig::LoadFromHawqConfigFile() {
   }
 
   hawqxmlconf.reset(new XmlConfig(confPath));
-  hawqxmlconf->parse();
+  if (!hawqxmlconf->parse())
+    return false;
   return true;
 }
 
 bool YarnConfig::LoadFromYarnConfigFile() {
+  if (isLoadFromYarnConfigFile) {
+    return true;
+  }
   string confPath=getHadoopHome();
   if (confPath == "") {
     return false;
   }
   confPath.append("/etc/hadoop/yarn-site.xml");
   yarnxmlconf.reset(new XmlConfig(confPath));
-  yarnxmlconf->parse();
+  if (!yarnxmlconf->parse())
+    return false;
   return true;
 }
 
-bool YarnConfig::isConfigYarn() {
+int YarnConfig::isConfigYarn() {
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return -1;
   }
   string rm = yarnxmlconf->getString("yarn.resourcemanager.address.rm1");
   if (rm == "") {
-    return false;
+    return 0;
   }
-  return true;
+  return 1;
 }
 
-bool YarnConfig::isHA() {
+int YarnConfig::isHA() {
   const hawq::test::PSQLQueryResult &result = psql.getQueryResult(
        "SELECT substring(fselocation from length('hdfs:// ') for (position('/' in substring(fselocation
from length('hdfs:// ')))-1)::int) "
        "FROM pg_filespace pgfs, pg_filespace_entry pgfse "
@@ -71,31 +131,30 @@ bool YarnConfig::isHA() {
   if (table.size() > 0) {
     int find = table[0][0].find(":");
     if (find < 0) {
-      return true;
+      return 1;
     } else {
-      return false;
+      return 0;
     }
   }
-  return false;
+  return -1;
 }
 
-bool YarnConfig::isConfigKerberos() {
+int YarnConfig::isConfigKerberos() {
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHawqHomeException();
+    return -1;
   }
   string authentication = hawqxmlconf->getString("hadoop.security.authentication");
   if (authentication == "kerberos") {
-    return true;
+    return 1;
   } else {
-    return false;
+    return 0;
   }
 }
 
 string YarnConfig::getHadoopHome() {
-  string cmd = "ps -ef|grep hadoop";
-  Command c(cmd);
-  string result = c.run().getResultOutput();
+  string result = "";
+  runCommand("ps -ef|grep hadoop", false, result);
   string hadoopHome = "";
   auto lines = hawq::test::split(result, '\n');
   for (size_t i=0; i<lines.size()-1; i++) {
@@ -133,24 +192,16 @@ bool YarnConfig::getHARM(const string &RMtype,
   auto haRMs = hawq::test::split(haRMValue, ',');
   for (size_t i = 0; i < haRMs.size(); i++) {
     string haRM = hawq::test::trim(haRMs[i]);
-    string cmd ="sudo -u ";
-    cmd.append(getYarnUser());
-    cmd.append(" yarn rmadmin -getServiceState ");
+    string cmd = "yarn rmadmin -getServiceState ";
     cmd.append(haRM);
-    Command c(cmd);
-    string result = c.run().getResultOutput();
-    auto lines = hawq::test::split(result, '\n');
-    if (lines.size() >= 2) {
-      string valueLine = lines[1];
-      if (valueLine == RMtype) {
-        RMService = haRM;
-        break;
-      }
+    if (runCommandAndFind(cmd, true, RMtype)) {
+      RMService = haRM;
+      break;
     }
   }
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return false;
   }
   string rpcAddressName = "yarn.resourcemanager.address.";
   rpcAddressName.append(RMService);
@@ -177,7 +228,7 @@ bool YarnConfig::getRMList(std::vector<string> &RMList,
 
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return false;
   }
 
   string RMAddressName = "yarn.resourcemanager.address";
@@ -195,53 +246,19 @@ bool YarnConfig::getRMList(std::vector<string> &RMList,
 
 void YarnConfig::getNodeManagers(std::vector<string> &nodemanagers,
                                  std::vector<int> &port) {
-  string cmd = "sudo -u ";
-  cmd.append(getYarnUser());
-  cmd.append(" yarn node -list -all");
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  bool begin = false;
-  for (size_t i=0; i<lines.size(); i++) {
-    if (!begin) {
-      if (lines[i].find("Node-Id") != string::npos) {
-        begin = true;
-      }
-    } else {
-      string values = hawq::test::split(lines[i], '\t')[0];
-      nodemanagers.push_back(hawq::test::trim(hawq::test::split(values, ':')[0]));
-      port.push_back(std::stoi(hawq::test::trim(hawq::test::split(values, ':')[1])));
-    }
-  }
+  runCommandAndGetNodesPorts("yarn node -list -all", true, nodemanagers, port);
 }
 
 void YarnConfig::getActiveNodeManagers(std::vector<string> &nodemanagers,
                                  std::vector<int> &port) {
-  string cmd = "sudo -u ";
-  cmd.append(getYarnUser());
-  cmd.append(" yarn node -list -states RUNNING");
-  Command c(cmd);
-  string result = c.run().getResultOutput();
-  auto lines = hawq::test::split(result, '\n');
-  bool begin = false;
-  for (size_t i=0; i<lines.size(); i++) {
-    if (!begin) {
-      if (lines[i].find("Node-Id") != string::npos) {
-        begin = true;
-      }
-    } else {
-      string values = hawq::test::split(lines[i], '\t')[0];
-      nodemanagers.push_back(hawq::test::trim(hawq::test::split(values, ':')[0]));
-      port.push_back(std::stoi(hawq::test::trim(hawq::test::split(values, ':')[1])));
-    }
-  }
+  runCommandAndGetNodesPorts("yarn node -list -states RUNNING", true, nodemanagers, port);
 }
 
 
 string YarnConfig::getParameterValue(const string &parameterName) {
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return "Error: failed to load from YARN configuration file";
   }
 
   return yarnxmlconf->getString(parameterName);
@@ -249,12 +266,12 @@ string YarnConfig::getParameterValue(const string &parameterName) {
 
 string YarnConfig::getParameterValue(const string &parameterName,
                                      const string &conftype) {
-  if (conftype == "yarn" || conftype == "YARN") {
+  if (hawq::test::lower(conftype) == "yarn") {
     return getParameterValue(parameterName);
   }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return "Error: failed to load from HAWQ configuration file";
   }
 
   return hawqxmlconf->getString(parameterName);
@@ -264,7 +281,7 @@ bool YarnConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue) {
   bool ret = LoadFromYarnConfigFile();
   if (!ret) {
-    throw GetHadoopHomeException();
+    return false;
   }
 
   return yarnxmlconf->setString(parameterName, parameterValue);
@@ -273,12 +290,12 @@ bool YarnConfig::setParameterValue(const string &parameterName,
 bool YarnConfig::setParameterValue(const string &parameterName,
                                    const string &parameterValue,
                                    const string &conftype) {
-  if (conftype == "yarn" || conftype == "YARN") {
+  if (hawq::test::lower(conftype) == "yarn") {
     return setParameterValue(parameterName, parameterValue);
   }
   bool ret = LoadFromHawqConfigFile();
   if (!ret) {
-    throw GetHawqHomeException();
+    return false;
   }
 
   return hawqxmlconf->setString(parameterName, parameterValue);

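The shared runCommandAndGetNodesPorts() above skips `yarn node -list` output until the Node-Id header row, then reads `host:port` from the first tab-separated column of each following row. A standalone sketch of that column parse, with an illustrative sample value:

#include <iostream>
#include <string>

int main() {
  // First tab-separated column of a "yarn node -list" row after the
  // "Node-Id" header, e.g. "nm1.example.com:45454".
  std::string values = "nm1.example.com:45454";
  size_t colon = values.find(':');
  std::string host = values.substr(0, colon);      // "nm1.example.com"
  int port = std::stoi(values.substr(colon + 1));  // 45454
  std::cout << host << " " << port << std::endl;
  return 0;
}
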
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/lib/yarn_config.h
----------------------------------------------------------------------
diff --git a/src/test/feature/lib/yarn_config.h b/src/test/feature/lib/yarn_config.h
index cccf5e9..41fc09f 100644
--- a/src/test/feature/lib/yarn_config.h
+++ b/src/test/feature/lib/yarn_config.h
@@ -7,7 +7,6 @@
 #include "psql.h"
 #include "sql_util.h"
 #include "xml_parser.h"
-#include "hdfs_config.h"
 
 namespace hawq {
 namespace test {
@@ -22,7 +21,10 @@ class YarnConfig {
     /**
       * YarnConfig constructor
       */
-    YarnConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {}
+    YarnConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {
+      isLoadFromHawqConfigFile = false;
+      isLoadFromYarnConfigFile = false;
+    }
 
     /**
       * YarnConfig destructor
@@ -31,21 +33,21 @@ class YarnConfig {
 
     /**
      * whether YARN is configured
-     * @return true if YARN is configured; if return false, following functions should not be called
+     * @return 1 if YARN is configured, 0 if YARN is not configured (in which case the following functions should not be called), -1 if failed to load from YARN configuration file
      */
-    bool isConfigYarn();
+    int isConfigYarn();
 
     /**
      * whether YARN is in HA mode
-     * @return true if YARN is HA
+     * @return 1 if YARN is HA, 0 if YARN is not HA, -1 if there is an error
      */
-    bool isHA();
+    int isHA();
 
     /**
      * whether YARN is kerberos
-     * @return true if YARN is kerbos
+     * @return 1 if YARN is configured with kerberos, 0 if YARN is not configured with kerberos, -1 if failed to load from HAWQ configuration file
      */
-    bool isConfigKerberos();
+    int isConfigKerberos();
 
     /**
      * get HADOOP working directory
@@ -123,10 +125,17 @@ class YarnConfig {
     bool setParameterValue(const std::string &parameterName, const std::string &parameterValue, const std::string &conftype);
 
   private:
+    void runCommand(const std::string &command, bool isyarnuser, std::string &result);
+    
+    bool runCommandAndFind(const std::string &command, bool isyarnuser, const std::string &findstring);
+    
+    void runCommandAndGetNodesPorts(const std::string &command, bool isyarnuser, std::vector<std::string> &nodemanagers, std::vector<int> &port);
+    
     /**
      * @return yarn user
      */
     std::string getYarnUser();
+    
     /**
      * load key-value parameters in ./etc/yarn-client.xml
      * @return true if succeeded
@@ -151,6 +160,8 @@ class YarnConfig {
   private:
     std::unique_ptr<XmlConfig> hawqxmlconf;
     std::unique_ptr<XmlConfig> yarnxmlconf;
+    bool isLoadFromHawqConfigFile;
+    bool isLoadFromYarnConfigFile;
     hawq::test::PSQL psql;
 };
 

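The isLoadFrom* members are meant to memoize a successful parse, so repeated getParameterValue()/setParameterValue() calls reuse the already-loaded file, while a failed parse is not cached and is retried on the next call; note the YarnConfig hunks above check the flags but, as shown, never set them to true. A self-contained sketch of the intended pattern, with hypothetical names:

// Hypothetical sketch of the load-once pattern behind the isLoadFrom* flags.
class LazyConfig {
 public:
  bool load() {
    if (loaded_) return true;        // a successful parse is cached
    if (!parseFile()) return false;  // failure is not cached, so load() retries
    loaded_ = true;
    return true;
  }
 private:
  bool parseFile() { return true; }  // stand-in for XmlConfig::parse()
  bool loaded_ = false;
};
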
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/56b70434/src/test/feature/testlib/test_lib.cpp
----------------------------------------------------------------------
diff --git a/src/test/feature/testlib/test_lib.cpp b/src/test/feature/testlib/test_lib.cpp
index a58a0d9..03a254f 100644
--- a/src/test/feature/testlib/test_lib.cpp
+++ b/src/test/feature/testlib/test_lib.cpp
@@ -60,80 +60,68 @@ TEST_F(TestCommonLib, TestHawqConfig) {
 
 TEST_F(TestCommonLib, TestHdfsConfig) {
   hawq::test::HdfsConfig hc;
-  try {
-    hc.isHA();
-    hc.isConfigKerberos();
-    hc.isTruncate();
-    std::string hadoopHome = hc.getHadoopHome();
-
-    std::string hostname = "";
-    int port = 0;
-    hc.getActiveNamenode(hostname, port);
-
-    hostname = "";
-    port = 0;
-    hc.getStandbyNamenode(hostname, port);
-
-    std::vector<std::string> hostList;
-    std::vector<int> portList;
-    hc.getNamenodes(hostList, portList);
-
-    hostList.clear();
-    portList.clear();
-    hc.getDatanodelist(hostList, portList);
-
-    hostList.clear();
-    portList.clear();
-    hc.getActiveDatanodes(hostList, portList);
-
-    hc.isSafemode();
-
-    hc.getParameterValue("dfs.replication");
-    hc.setParameterValue("dfs.replication", "1");
-  } catch (hawq::test::GetHawqHomeException &e) {
-    printf("Failed to get HAWQ home!");
-  } catch (hawq::test::GetHadoopHomeException &e) {
-    printf("Failed to get HADOOP home!");
-  }
+  hc.isHA();
+  hc.isConfigKerberos();
+  hc.isTruncate();
+  std::string hadoopHome = hc.getHadoopHome();
+
+  std::string hostname = "";
+  int port = 0;
+  hc.getActiveNamenode(hostname, port);
+
+  hostname = "";
+  port = 0;
+  hc.getStandbyNamenode(hostname, port);
+
+  std::vector<std::string> hostList;
+  std::vector<int> portList;
+  hc.getNamenodes(hostList, portList);
+
+  hostList.clear();
+  portList.clear();
+  hc.getDatanodelist(hostList, portList);
+
+  hostList.clear();
+  portList.clear();
+  hc.getActiveDatanodes(hostList, portList);
+
+  hc.isSafemode();
+
+  hc.getParameterValue("dfs.replication");
+  hc.setParameterValue("dfs.replication", "1");
 }
 
 TEST_F(TestCommonLib, TestYarnConfig) {
   hawq::test::YarnConfig hc;
-  if (!hc.isConfigYarn()) {
+  if (hc.isConfigYarn() == 0) {
     return;
   }
-  try {
-    hc.isHA();
-    hc.isConfigKerberos();
-    std::string hadoopHome = hc.getHadoopHome();
-
-    std::string hostname = "";
-    int port = 0;
-    hc.getActiveRM(hostname, port);
-
-    hostname = "";
-    port = 0;
-    hc.getStandbyRM(hostname, port);
-
-    std::vector<std::string> hostList;
-    std::vector<int> portList;
-    hc.getRMList(hostList, portList);
-
-    hostList.clear();
-    portList.clear();
-    hc.getNodeManagers(hostList, portList);
-
-    hostList.clear();
-    portList.clear();
-    hc.getActiveNodeManagers(hostList, portList);
-
-    hc.getParameterValue("yarn.scheduler.minimum-allocation-mb");
-    hc.setParameterValue("yarn.scheduler.minimum-allocation-mb", "1024");
-  } catch (hawq::test::GetHawqHomeException &e) {
-    printf("Failed to get HAWQ home!");
-  } catch (hawq::test::GetHadoopHomeException &e) {
-    printf("Failed to get HADOOP home!");
-  }
+  hc.isHA();
+  hc.isConfigKerberos();
+  std::string hadoopHome = hc.getHadoopHome();
+
+  std::string hostname = "";
+  int port = 0;
+  hc.getActiveRM(hostname, port);
+
+  hostname = "";
+  port = 0;
+  hc.getStandbyRM(hostname, port);
+
+  std::vector<std::string> hostList;
+  std::vector<int> portList;
+  hc.getRMList(hostList, portList);
+
+  hostList.clear();
+  portList.clear();
+  hc.getNodeManagers(hostList, portList);
+
+  hostList.clear();
+  portList.clear();
+  hc.getActiveNodeManagers(hostList, portList);
+
+  hc.getParameterValue("yarn.scheduler.minimum-allocation-mb");
+  hc.setParameterValue("yarn.scheduler.minimum-allocation-mb", "1024");
 }
 
 TEST_F(TestCommonLib, TestCommand) {


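One caveat in TestYarnConfig above: the guard returns early only when isConfigYarn() is exactly 0, so a -1 (yarn-site.xml failed to load) still exercises the remaining calls. A stricter variant would be (sketch, not part of this commit):

int configured = hc.isConfigYarn();
if (configured <= 0) {
  // 0: YARN is not configured; -1: yarn-site.xml could not be loaded
  return;
}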