incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ga...@apache.org
Subject svn commit: r1294877 - in /incubator/hcatalog/trunk: ./ src/test/e2e/hcatalog/ src/test/e2e/hcatalog/conf/ src/test/e2e/hcatalog/drivers/ src/test/e2e/hcatalog/tests/ src/test/e2e/hcatalog/tools/generate/
Date Tue, 28 Feb 2012 22:45:14 GMT
Author: gates
Date: Tue Feb 28 22:45:14 2012
New Revision: 1294877

URL: http://svn.apache.org/viewvc?rev=1294877&view=rev
Log:
HCATALOG-243 HCat e2e tests need to change to not use StorageDrivers

Modified:
    incubator/hcatalog/trunk/CHANGES.txt
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/build.xml
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/conf/default.conf
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/Util.pm
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hadoop.conf
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/tools/generate/generate_data.pl

Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Tue Feb 28 22:45:14 2012
@@ -39,6 +39,8 @@ Trunk (unreleased changes)
  HCAT-2 Support nested schema conversion between Hive and Pig (julienledem via hashutosh)
 
   IMPROVEMENTS
+  HCAT-243 HCat e2e tests need to change to not use StorageDrivers (gates)
+
   HCAT-256 HCatalog should provide its classpath to tools (gates)
 
   HCAT-266 Upgrade HBase dependency to 0.92. (thw via toffer)

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/build.xml?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/build.xml (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/build.xml Tue Feb 28 22:45:14 2012
@@ -56,6 +56,7 @@
 
   <property name="test.location" value="${basedir}/testdist"/>
   <property name="benchmark.location" value="${test.location}/benchmarks"/>
+  <property name="hadoop.core.path" value="${harness.hadoop.home}"/>
 
   <!-- Build the UDFs -->
   <target name="udfs" >

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/conf/default.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/conf/default.conf?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/conf/default.conf (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/conf/default.conf Tue Feb 28 22:45:14 2012
@@ -62,7 +62,7 @@ $cfg = {
     , 'hcat_data_dir'    => '/user/hcat/tests/data'
     , 'hivehome'          => $ENV{'PH_HIVE_HOME'}
     , 'hcathome'          => $ENV{'HCAT_INSTALL_DIR'}
-    , 'hcatalog.jar' => "$ENV{HCAT_JAR},$ENV{HIVE_ROOT}/build/dist/lib/hive-serde-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-exec-$hive_version.jar,$ENV{PIG_HOME}/pig-0.9.2-withouthadoop.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-metastore-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/libfb303-0.7.0.jar,$ENV{HIVE_ROOT}/build/dist/lib/jdo2-api-2.3-ec.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar,$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar"
+    , 'hcatalog.jar' => "$ENV{HCAT_JAR},$ENV{HIVE_ROOT}/build/dist/lib/hive-serde-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-exec-$hive_version.jar,$ENV{PIG_HOME}/pig-0.9.2-withouthadoop.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-metastore-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/libfb303-0.7.0.jar,$ENV{HIVE_ROOT}/build/dist/lib/jdo2-api-2.3-ec.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.92.0-SNAPSHOT.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.3.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar,$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar"
 
     #PIG
     , 'testconfigpath'   => "$ENV{PH_CLUSTER}"
@@ -72,7 +72,7 @@ $cfg = {
     , 'pigpath'          => "$ENV{PIG_HOME}"
     , 'pigjar'           => "$ENV{PIG_JAR}" # Pig jar that doesn't have Antlr
     , 'oldpigpath'       => "$ENV{PH_OLDPIG}"
-    , 'additionaljars' =>  "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog"
+    , 'additionaljars' =>  "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.92.0-SNAPSHOT.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.3.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog"
 
     #HADOOP
     , 'hadoopHome'       => "$ENV{HCAT_ROOT}/lib"
@@ -87,5 +87,6 @@ $cfg = {
     , 'hive_bin_location' => "$ENV{HIVE_ROOT}/build/dist/bin" 
     , 'hbaseconfigpath'   => "$ENV{HBASE_CONF_DIR}"
     , 'hivehome' => "$ENV{HIVE_HOME}"
+    , 'hive.additionaljars' =>  "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0.jar"
 
 };

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/Util.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/Util.pm?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/Util.pm (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/Util.pm Tue Feb 28 22:45:14 2012
@@ -151,25 +151,27 @@ sub runHiveCmdFromFile($$;$$$$)
     my @cmd = ("$cfg->{'hivehome'}/bin/hive");
 
     # Add all of the modified properties we want to set
-    push(@cmd, "--hiveconf", "hive.metastore.uris=$cfg->{'thriftserver'}");
-    push(@cmd, "--hiveconf", "hive.metastore.local=false");
+#   push(@cmd, "--hiveconf", "hive.metastore.uris=$cfg->{'thriftserver'}");
+#   push(@cmd, "--hiveconf", "hive.metastore.local=false");
 
-    if( defined($cfg->{'metastore.principal'}) && ($cfg->{'metastore.principal'} =~ m/\S+/)
-        &&  ($cfg->{'metastore.principal'} ne '${metastore.principal}')){
-        push(@cmd, "--hiveconf", "hive.metastore.sasl.enabled=true",  "--hiveconf", "hive.metastore.kerberos.principal=$cfg->{'metastore.principal'}");
-    } else {
-        push(@cmd, "--hiveconf", "hive.metastore.sasl.enabled=false");
-    }
-
-    if (defined($cfg->{'additionaljarspath'})) {
-        $ENV{'HIVE_AUX_JARS_PATH'} = $cfg->{'additionaljarspath'};
-    }
-
-    if (defined($cfg->{'hiveconf'})) {
-        foreach my $hc (@{$cfg->{'hiveconf'}}) {
-            push(@cmd, "--hiveconf", $hc);
-        }
-    }
+#   if( defined($cfg->{'metastore.principal'}) && ($cfg->{'metastore.principal'} =~ m/\S+/)
+#       &&  ($cfg->{'metastore.principal'} ne '${metastore.principal}')){
+#       push(@cmd, "--hiveconf", "hive.metastore.sasl.enabled=true",  "--hiveconf", "hive.metastore.kerberos.principal=$cfg->{'metastore.principal'}");
+#   } else {
+#       push(@cmd, "--hiveconf", "hive.metastore.sasl.enabled=false");
+#   }
+
+    $ENV{'HIVE_CONF_DIR'} = "$cfg->{'hcathome'}/etc/hcatalog/";
+
+    if (defined($cfg->{'hive.additionaljars'})) {
+        $ENV{'HIVE_AUX_JARS_PATH'} = $cfg->{'hive.additionaljars'};
+    }
+
+#   if (defined($cfg->{'hiveconf'})) {
+#       foreach my $hc (@{$cfg->{'hiveconf'}}) {
+#           push(@cmd, "--hiveconf", $hc);
+#       }
+#   }
 
     if (defined($cfg->{'hivecmdargs'})) {
         push(@cmd, @{$cfg->{'hivecmdargs'}});

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hadoop.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hadoop.conf?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hadoop.conf (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hadoop.conf Tue Feb 28 22:45:14 2012
@@ -153,7 +153,7 @@ create table hadoop_write_2(
             d double,
             m map<string, string>,
             bb array<struct<a: int, b: string>>)
-            row format serde 'org/apache/hcatalog/data/JsonSerDe'
+            row format serde 'org.apache.hcatalog.data.JsonSerDe'
             stored as textfile;\
                                 ,'hadoop' => q\
 jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteJson -libjars :HCAT_JAR: :THRIFTSERVER:
all100kjson hadoop_write_2
@@ -171,11 +171,7 @@ create table hadoop_write_3(
             name string,
             age int,
             gpa double)
-stored as rcfile
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);
+stored as rcfile;
 \,
                                 ,'hadoop' => q\
 jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER:
all100krc hadoop_write_3

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf Tue Feb 28 22:45:14 2012
@@ -164,10 +164,8 @@ create table pig_write_2(
             d double,
             m map<string, string>,
             bb array<struct<a: int, b: string>>)
-            STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-            INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver'
-            TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}', 'hcat.pig.args.delimiter'='	');
+            row format serde 'org.apache.hcatalog.data.JsonSerDe'
+            STORED AS TEXTFILE;
 \
                                 ,'pig' => q\a = load 'all100kjson' using org.apache.hcatalog.pig.HCatLoader();
 b = foreach a generate s, i, d;
@@ -184,11 +182,7 @@ create table pig_write_3(
             name string,
             age int,
             gpa double)
-stored as rcfile
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);
+stored as rcfile;
 \
                                 ,'pig' => q\a = load 'all100krc' using org.apache.hcatalog.pig.HCatLoader();
 b = foreach a generate name, age;

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/tools/generate/generate_data.pl
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/tools/generate/generate_data.pl?rev=1294877&r1=1294876&r2=1294877&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/tools/generate/generate_data.pl (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/tools/generate/generate_data.pl Tue Feb 28 22:45:14 2012
@@ -314,18 +314,12 @@ location '$location';\n";
     } elsif ($format eq "rc") {
         print $hivefp "
 stored as rcfile
-location '$location'
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);\n";
+location '$location';\n";
     } elsif ($format eq "json") {
-        print $hivefp " STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'

-INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver';
+        print $hivefp "
+row format serde 'org.apache.hcatalog.data.JsonSerDe'
+stored as textfile
 location '$location'
-TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}, 'hcat.pig.args.delimiter'='\t')
 ;\n";
     } else {
         die "Unknown format $format\n";
@@ -528,9 +522,7 @@ sub findHiveJars()
         row format delimited
         fields terminated by ':'
         stored as textfile
-        location '$hdfsTargetDir/$tableName';\n
-        alter table $tableName set TBLPROPERTIES 
-         ('hcat.pig.loader.args'=':', 'hcat.pig.storer.args'=':');\n";
+        location '$hdfsTargetDir/$tableName';\n";
         for (my $i = 0; $i < $numRows; $i++) {
             printf HDFS "%d:%d:%d:%ld:%.2f:%.2f:%s\n",
                 (int(rand(2**8) - 2**7)),
@@ -669,11 +661,9 @@ for (my $i = 0; $i < $numRows; $i++) {
             d double,
             m map<string, string>,
             bb array<struct<a: int, b: string>>)
-            STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-            INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver'
-            location '$hdfsTargetDir/$tableName'
-            TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}', 'hcat.pig.args.delimiter'='\t');\n";
+            row format serde 'org.apache.hcatalog.data.JsonSerDe'
+            STORED AS TEXTFILE 
+            location '$hdfsTargetDir/$tableName';\n";
         open(PLAIN, ">$tableName.plain") or
             die("Cannot open file $tableName.hive.sql, $!\n");
         for (my $i = 0; $i < $numRows; $i++) {



Mime
View raw message