hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r952877 [2/2] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ metastore/if/ metastore/src/gen-cpp/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/src/gen-py/h...
Date: Wed, 09 Jun 2010 01:37:49 GMT
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1,33 @@
+PREHOOK: query: -- Tests trying to archive a partition twice.
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+CREATE TABLE srcpart_archived LIKE srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Tests trying to archive a partition twice.
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+CREATE TABLE srcpart_archived LIKE srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@srcpart_archived
+PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
+SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
+POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
+SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_ARCHIVE
+FAILED: Error in metadata: Specified partition is already archived
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1,7 @@
+PREHOOK: query: -- Tests trying to unarchive a non-archived partition
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_UNARCHIVE
+FAILED: Error in metadata: Specified partition is not archived
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive3.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive3.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: ARCHIVE can only be run on partitions

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive4.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive4.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive4.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: ARCHIVE can only be run on a single partition

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive5.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive5.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive5.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Partition value contains a reserved substring (User value: 14_INTERMEDIATE_ORIGINAL Reserved substring: _INTERMEDIATE_ORIGINAL)

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/archive.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/archive.q.out?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/archive.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/archive.q.out Wed Jun  9 01:37:48 2010
@@ -0,0 +1,195 @@
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-31_915_8404207959149265563/10000
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-31_915_8404207959149265563/10000
+48479881068
+PREHOOK: query: ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: query: ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_ARCHIVE
+PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-39_278_6500531861845897423/10000
+POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-39_278_6500531861845897423/10000
+48479881068
+PREHOOK: query: ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_UNARCHIVE
+POSTHOOK: query: ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_UNARCHIVE
+PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-45_152_7929745238260502728/10000
+POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-45_152_7929745238260502728/10000
+48479881068
+PREHOOK: query: CREATE TABLE harbucket(key INT) 
+PARTITIONED by (ds STRING)
+CLUSTERED BY (key) INTO 10 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE harbucket(key INT) 
+PARTITIONED by (ds STRING)
+CLUSTERED BY (key) INTO 10 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@harbucket
+PREHOOK: query: INSERT OVERWRITE TABLE harbucket PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@harbucket@ds=1
+POSTHOOK: query: INSERT OVERWRITE TABLE harbucket PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@harbucket@ds=1
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@harbucket@ds=1
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-55_224_4935516234179357829/10000
+POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@harbucket@ds=1
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-02-55_224_4935516234179357829/10000
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+0
+0
+0
+10
+20
+30
+PREHOOK: query: ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: query: ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@harbucket@ds=1
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-01_089_7613007639376060720/10000
+POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@harbucket@ds=1
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-01_089_7613007639376060720/10000
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+0
+0
+0
+10
+20
+30
+PREHOOK: query: ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_UNARCHIVE
+POSTHOOK: query: ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_UNARCHIVE
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@harbucket@ds=1
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-05_256_2444261282224863204/10000
+POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@harbucket@ds=1
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-05_256_2444261282224863204/10000
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+0
+0
+0
+10
+20
+30
+PREHOOK: query: DROP TABLE harbucket
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE harbucket
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@harbucket
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: CREATE TABLE old_name(key INT) 
+PARTITIONED by (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE old_name(key INT) 
+PARTITIONED by (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@old_name
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: INSERT OVERWRITE TABLE old_name PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@old_name@ds=1
+POSTHOOK: query: INSERT OVERWRITE TABLE old_name PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@old_name@ds=1
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: ALTER TABLE old_name ARCHIVE PARTITION (ds='1')
+PREHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: query: ALTER TABLE old_name ARCHIVE PARTITION (ds='1')
+POSTHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@old_name@ds=1
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-14_435_1169638822418513482/10000
+POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@old_name@ds=1
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-14_435_1169638822418513482/10000
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+48656137
+PREHOOK: query: ALTER TABLE old_name RENAME TO new_name
+PREHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: query: ALTER TABLE old_name RENAME TO new_name
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@old_name
+POSTHOOK: Output: default@new_name
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@new_name@ds=1
+PREHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-19_685_3074346646787769085/10000
+POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@new_name@ds=1
+POSTHOOK: Output: file:/data/users/pyang/mstore/trunk/VENDOR.hive/trunk/build/ql/scratchdir/hive_2010-06-08_15-03-19_685_3074346646787769085/10000
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+NULL
+PREHOOK: query: DROP TABLE new_name
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE new_name
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@new_name
+POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]

Modified: hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm Wed Jun  9 01:37:48 2010
@@ -84,6 +84,10 @@ public class $className extends TestCase
 
       qt.addFile("$qf.getCanonicalPath()");
 
+      if (qt.shouldBeSkipped("$fname")) {
+        return;
+      }
+      
       qt.cliInit("$fname");
       int ecode = qt.executeClient("$fname");
       if (ecode != 0) {

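The qt.shouldBeSkipped() guard added above is what honors the EXCLUDE_HADOOP_MAJOR_VERSIONS(...) markers in the new archive*.q tests. The QTestUtil side of the change lives in the other half of this commit (part [1/2]); purely as a hedged sketch of how such a check could work, using the ShimLoader.getMajorVersion() method made public at the bottom of this diff — the class name and regex here are illustrative, not the committed code:

    // Hypothetical sketch only; the real logic is in QTestUtil (part 1/2 of r952877).
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class VersionSkipCheck {
      private static final Pattern EXCLUDE =
          Pattern.compile("EXCLUDE_HADOOP_MAJOR_VERSIONS\\(([^)]*)\\)");

      // Returns true if the .q file excludes the Hadoop major version in use.
      public static boolean shouldBeSkipped(String queryFileContents) {
        Matcher m = EXCLUDE.matcher(queryFileContents);
        if (!m.find()) {
          return false;
        }
        String current = ShimLoader.getMajorVersion();   // e.g. "0.17"
        for (String excluded : m.group(1).split(",")) {  // "0.17, 0.18, 0.19"
          if (current.equals(excluded.trim())) {
            return true;
          }
        }
        return false;
      }
    }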
Modified: hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm Wed Jun  9 01:37:48 2010
@@ -59,6 +59,11 @@ public class $className extends TestCase
 
       qt.addFile("$qf.getCanonicalPath()");
 
+      if (qt.shouldBeSkipped("$fname")) {
+        System.out.println("Test $fname skipped");
+        return;
+      }
+      
       qt.cliInit("$fname");
       int ecode = qt.executeClient("$fname");
       if (ecode == 0) {

Modified: hadoop/hive/trunk/shims/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/build.xml?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/build.xml (original)
+++ hadoop/hive/trunk/shims/build.xml Wed Jun  9 01:37:48 2010
@@ -18,8 +18,8 @@
 -->
 
 
-<!-- 
-Before you can run these subtargets directly, you need 
+<!--
+Before you can run these subtargets directly, you need
 to call at top-level: ant deploy-contrib compile-core-test
 -->
 <project name="shims" default="jar">
@@ -27,6 +27,7 @@ to call at top-level: ant deploy-contrib
 
   <path id="classpath">
     <pathelement location="${hadoop.jar}"/>
+    <pathelement location="${hadoop.tools.jar}"/>
     <pathelement location="${hadoop.test.jar}"/>
     <fileset dir="${hadoop.root}/lib">
       <include name="**/*.jar" />

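(The new hadoop.tools.jar classpath entry is presumably what supplies org.apache.hadoop.tools.HadoopArchives for the 0.20 shim below; in stock Hadoop 0.20 that class ships in the separate tools jar rather than the core jar.)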
Modified: hadoop/hive/trunk/shims/src/0.17/java/org/apache/hadoop/hive/shims/Hadoop17Shims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/0.17/java/org/apache/hadoop/hive/shims/Hadoop17Shims.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/0.17/java/org/apache/hadoop/hive/shims/Hadoop17Shims.java (original)
+++ hadoop/hive/trunk/shims/src/0.17/java/org/apache/hadoop/hive/shims/Hadoop17Shims.java Wed Jun  9 01:37:48 2010
@@ -127,4 +127,11 @@ public class Hadoop17Shims implements Ha
   public void setNullOutputFormat(JobConf conf) {
     conf.setOutputFormat(NullOutputFormat.class);
   }
+  
+  @Override
+  public int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
+      String archiveName) throws Exception {
+    throw new RuntimeException("Not implemented in this Hadoop version");
+  }
+
 }

Modified: hadoop/hive/trunk/shims/src/0.18/java/org/apache/hadoop/hive/shims/Hadoop18Shims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/0.18/java/org/apache/hadoop/hive/shims/Hadoop18Shims.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/0.18/java/org/apache/hadoop/hive/shims/Hadoop18Shims.java (original)
+++ hadoop/hive/trunk/shims/src/0.18/java/org/apache/hadoop/hive/shims/Hadoop18Shims.java Wed Jun  9 01:37:48 2010
@@ -128,6 +128,12 @@ public class Hadoop18Shims implements Ha
     conf.set(varName, Float.toString(val));
   }
 
+  @Override
+  public int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
+      String archiveName) throws Exception {
+    throw new RuntimeException("Not implemented in this Hadoop version");
+  }
+
   public void setNullOutputFormat(JobConf conf) {
     conf.setOutputFormat(NullOutputFormat.class);
   }

Modified: hadoop/hive/trunk/shims/src/0.19/java/org/apache/hadoop/hive/shims/Hadoop19Shims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/0.19/java/org/apache/hadoop/hive/shims/Hadoop19Shims.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/0.19/java/org/apache/hadoop/hive/shims/Hadoop19Shims.java (original)
+++ hadoop/hive/trunk/shims/src/0.19/java/org/apache/hadoop/hive/shims/Hadoop19Shims.java Wed Jun  9 01:37:48 2010
@@ -485,6 +485,12 @@ public class Hadoop19Shims implements Ha
     conf.set(varName, Float.toString(val));
   }
 
+  @Override
+  public int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
+      String archiveName) throws Exception {
+    throw new RuntimeException("Not implemented in this Hadoop version");
+  }
+
   public static class NullOutputCommitter extends OutputCommitter {
     public void setupJob(JobContext jobContext) { }
     public void cleanupJob(JobContext jobContext) { }

Modified: hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Wed Jun  9 01:37:48 2010
@@ -17,10 +17,12 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
-import java.io.DataOutput;
-import java.io.DataInput;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -29,23 +31,25 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.OutputCommitter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskAttemptContext;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskID;
 import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
-import org.apache.hadoop.mapred.OutputCommitter;
-import org.apache.hadoop.mapred.TaskAttemptContext;
-import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.tools.HadoopArchives;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Implemention of shims against Hadoop 0.20.0.
@@ -377,15 +381,50 @@ public class Hadoop20Shims implements Ha
     conf.setFloat(varName, val);
   }
 
+  @Override
+  public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
+      String archiveName) throws Exception {
+
+    HadoopArchives har = new HadoopArchives(conf);
+    List<String> args = new ArrayList<String>();
+
+    if (conf.get("hive.archive.har.parentdir.settable") == null) {
+      throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
+    }
+    boolean parentSettable =
+      conf.getBoolean("hive.archive.har.parentdir.settable", false);
+
+    if (parentSettable) {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add("-p");
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    } else {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    }
+
+    return ToolRunner.run(har, args.toArray(new String[0]));
+  }
+
   public static class NullOutputCommitter extends OutputCommitter {
+    @Override
     public void setupJob(JobContext jobContext) { }
+    @Override
     public void cleanupJob(JobContext jobContext) { }
 
+    @Override
     public void setupTask(TaskAttemptContext taskContext) { }
+    @Override
     public boolean needsTaskCommit(TaskAttemptContext taskContext) {
       return false;
     }
+    @Override
     public void commitTask(TaskAttemptContext taskContext) { }
+    @Override
     public void abortTask(TaskAttemptContext taskContext) { }
   }
 

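createHadoopArchive() above drives the stock HadoopArchives tool through ToolRunner. When hive.archive.har.parentdir.settable is true it uses the newer -p form (-archiveName NAME -p PARENT DEST); otherwise it falls back to the older argument order without -p, and it refuses to run at all if the property is unset rather than guess which syntax the installed Hadoop supports. A minimal caller sketch, going through ShimLoader so the pre-0.20 shims (which just throw) are dispatched to transparently — the paths and archive name are illustrative:

    // Hypothetical usage sketch; paths and archive name are made up.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class ArchiveExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Must be set explicitly: the shim throws if the property is absent.
        conf.setBoolean("hive.archive.har.parentdir.settable", true);

        HadoopShims shims = ShimLoader.getHadoopShims();
        Path parent = new Path("/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12");
        Path dest = new Path("/user/hive/warehouse/srcpart");
        int rc = shims.createHadoopArchive(conf, parent, dest, "data.har");
        if (rc != 0) {
          throw new RuntimeException("har creation failed, exit code " + rc);
        }
      }
    }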
Added: hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/HiveHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/HiveHarFileSystem.java?rev=952877&view=auto
==============================================================================
--- hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/HiveHarFileSystem.java (added)
+++ hadoop/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/HiveHarFileSystem.java Wed Jun  9 01:37:48 2010
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.shims;
+
+import org.apache.hadoop.fs.HarFileSystem;
+
+/**
+ * HiveHarFileSystem - fixes issue with block locations
+ *
+ */
+public class HiveHarFileSystem extends HarFileSystem {
+  /*
+  @Override
+  public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
+      long len) throws IOException {
+
+    // In some places (e.g. FileInputFormat) this BlockLocation is used to
+    // figure out sizes/offsets and so a completely blank one will not work.
+    String [] hosts = {"DUMMY_HOST"};
+    return new BlockLocation[]{new BlockLocation(null, hosts, 0, file.getLen())};
+  }
+  */
+}

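The interesting part of HiveHarFileSystem, the getFileBlockLocations() override, lands commented out. For reference, a compilable version of that commented block would look roughly like the following (same logic, plus the imports it needs); it is not active in this commit:

    import java.io.IOException;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.HarFileSystem;

    public class HiveHarFileSystem extends HarFileSystem {
      @Override
      public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
          long len) throws IOException {
        // A completely blank BlockLocation breaks size/offset arithmetic in
        // callers such as FileInputFormat, so return a single synthetic
        // location spanning the whole file.
        String[] hosts = {"DUMMY_HOST"};
        return new BlockLocation[] {
            new BlockLocation(null, hosts, 0, file.getLen())};
      }
    }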
Modified: hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Wed Jun  9 01:37:48 2010
@@ -131,6 +131,8 @@ public interface HadoopShims {
    */
   String[] getTaskJobIDs(TaskCompletionEvent t);
 
+  int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
+      String archiveName) throws Exception;
   /**
    * Hive uses side effect files exclusively for it's output. It also manages
    * the setup/cleanup/commit of output from the hive client. As a result it does

Modified: hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java?rev=952877&r1=952876&r2=952877&view=diff
==============================================================================
--- hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java (original)
+++ hadoop/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java Wed Jun  9 01:37:48 2010
@@ -97,7 +97,7 @@ public abstract class ShimLoader {
    * This is simply the first two components of the version number
    * (e.g "0.18" or "0.20")
    */
-  private static String getMajorVersion() {
+  public static String getMajorVersion() {
     String vers = VersionInfo.getVersion();
 
     String[] parts = vers.split("\\.");


