From hashut...@apache.org
Subject svn commit: r1670462 [3/26] - in /hive/branches/cbo: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/conf/ common/src/java/org/apache/hive/common/util/ common/src/test/org/apache/h...
Date Tue, 31 Mar 2015 20:14:07 GMT
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
Tue Mar 31 20:14:02 2015
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
+import org.apache.hadoop.fs.Path;
 import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
 import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
@@ -117,7 +118,7 @@ public class HiveDelegator extends Launc
   private List<String> makeBasicArgs(String execute, String srcFile, String otherFiles,
                                          String statusdir, String completedUrl,
                                          boolean enablelog)
-    throws URISyntaxException, FileNotFoundException, IOException,
+    throws URISyntaxException, IOException,
     InterruptedException
   {
     ArrayList<String> args = new ArrayList<String>();
@@ -142,6 +143,30 @@ public class HiveDelegator extends Launc
       args.add(appConf.hiveArchive());
     }
 
+    //ship additional artifacts, for example for Tez
+    String extras = appConf.get(AppConfig.HIVE_EXTRA_FILES); 
+    if(extras != null && extras.length() > 0) {
+      boolean foundFiles = false;
+      for(int i = 0; i < args.size(); i++) {
+        if(FILES.equals(args.get(i))) {
+          String value = args.get(i + 1);
+          args.set(i + 1, value + "," + extras);
+          foundFiles = true;
+        }
+      }
+      if(!foundFiles) {
+        args.add(FILES);
+        args.add(extras);
+      }
+      String[] extraFiles = appConf.getStrings(AppConfig.HIVE_EXTRA_FILES);
+      StringBuilder extraFileNames = new StringBuilder();
+      //now tell LaunchMapper which files it should add to HADOOP_CLASSPATH
+      for(String file : extraFiles) {
+        Path p = new Path(file);
+        extraFileNames.append(p.getName()).append(",");
+      }
+      addDef(args, JobSubmissionConstants.HADOOP_CLASSPATH_EXTRAS, extraFileNames.toString());
+    }
     return args;
   }
 }

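For readers following the HiveDelegator change: the added block merges WebHCat's extra Hive artifacts into any existing "-files" argument, or appends a fresh "-files" pair when none is present. Below is a minimal, self-contained sketch of that merge rule; the class name and sample paths are illustrative only, not part of the commit.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Sketch of the "-files" merge performed in makeBasicArgs() above.
public class FilesArgMergeSketch {
  static final String FILES = "-files"; // mirrors TempletonDelegator.FILES

  static void mergeExtras(List<String> args, String extras) {
    boolean foundFiles = false;
    for (int i = 0; i + 1 < args.size(); i++) {
      if (FILES.equals(args.get(i))) {
        // Extend the existing comma-separated -files value.
        args.set(i + 1, args.get(i + 1) + "," + extras);
        foundFiles = true;
      }
    }
    if (!foundFiles) {
      args.add(FILES);
      args.add(extras);
    }
  }

  public static void main(String[] ignored) {
    List<String> args = new ArrayList<String>(Arrays.asList(FILES, "hive_tar.tgz"));
    mergeExtras(args, "tez-site.xml,extra.jar");
    System.out.println(args); // [-files, hive_tar.tgz,tez-site.xml,extra.jar]
  }
}
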
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
Tue Mar 31 20:14:02 2015
@@ -28,6 +28,10 @@ public class TempletonDelegator {
    * http://hadoop.apache.org/docs/r1.0.4/commands_manual.html#Generic+Options
    */
   public static final String ARCHIVES = "-archives";
+  /**
+   * http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CommandsManual.html#Generic_Options
+   */
+  public static final String FILES = "-files";
   
   protected AppConfig appConf;
 

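The new FILES constant names Hadoop's generic "-files" option, which localizes a comma-separated list of files alongside a submitted job. A hedged illustration of how GenericOptionsParser consumes that option follows; the temp file exists only so the parser's file-existence check passes, and on Hadoop versions of this era the parsed list lands in the internal "tmpfiles" property.

import java.io.File;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

// Shows the generic "-files" option being absorbed by GenericOptionsParser.
public class GenericFilesOptionDemo {
  public static void main(String[] args) throws Exception {
    File extra = File.createTempFile("extras", ".txt"); // parser verifies files exist
    Configuration conf = new Configuration();
    String[] rest = new GenericOptionsParser(conf,
        new String[] {"-files", extra.getPath(), "run"}).getRemainingArgs();
    System.out.println(Arrays.toString(rest)); // "-files" is consumed; [run] remains
    System.out.println(conf.get("tmpfiles"));  // fully-qualified URI of the file
  }
}
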
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
Tue Mar 31 20:14:02 2015
@@ -31,6 +31,12 @@ public interface JobSubmissionConstants
   public static final String EXIT_FNAME = "exit";
   public static final int WATCHER_TIMEOUT_SECS = 10;
   public static final int KEEP_ALIVE_MSEC = 60 * 1000;
+  /**
+   * A comma-separated list of files to be added to HADOOP_CLASSPATH in 
+   * {@link org.apache.hive.hcatalog.templeton.tool.LaunchMapper}.  Used to localize additional
+   * artifacts for job submission requests.
+   */
+  public static final String HADOOP_CLASSPATH_EXTRAS = "templeton.hadoop.classpath.extras";
   /*
    * The = sign in the string for TOKEN_FILE_ARG_PLACEHOLDER is required because
    * org.apache.hadoop.util.GenericOptionsParser.preProcessForWindows() prepares

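LaunchMapper reads this constant back with Configuration.getStrings(...), which splits on commas and trims surrounding whitespace. A quick demonstration of that round trip, with made-up sample values:

import org.apache.hadoop.conf.Configuration;

// Round trip of a comma-separated value, as used for HADOOP_CLASSPATH_EXTRAS.
public class ClasspathExtrasConfDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("templeton.hadoop.classpath.extras", "udfs.jar, tez-conf");
    for (String s : conf.getStrings("templeton.hadoop.classpath.extras")) {
      System.out.println("[" + s + "]"); // prints [udfs.jar] then [tez-conf]
    }
  }
}
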
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
Tue Mar 31 20:14:02 2015
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.templet
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -33,7 +34,6 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.hcatalog.templeton.AppConfig;
 import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
 
@@ -115,6 +115,32 @@ public class LaunchMapper extends Mapper
       }
     }
   }
+  private static void handleHadoopClasspathExtras(Configuration conf, Map<String, String> env)
+    throws IOException {
+    if(!TempletonUtils.isset(conf.get(JobSubmissionConstants.HADOOP_CLASSPATH_EXTRAS))) {
+      return;
+    }
+    LOG.debug(HADOOP_CLASSPATH_EXTRAS + "=" + conf.get(HADOOP_CLASSPATH_EXTRAS));
+    String[] files = conf.getStrings(HADOOP_CLASSPATH_EXTRAS);
+    StringBuilder paths = new StringBuilder();
+    FileSystem fs = FileSystem.getLocal(conf);//these have been localized already
+    for(String f : files) {
+      Path p = new Path(f);
+      FileStatus fileStatus = fs.getFileStatus(p);
+      paths.append(f);
+      if(fileStatus.isDirectory()) {
+        paths.append(File.separator).append("*");
+      }
+      paths.append(File.pathSeparator);
+    }
+    paths.setLength(paths.length() - 1);
+    if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
+      env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + File.pathSeparator
+ paths);
+    }
+    else {
+      env.put("HADOOP_CLASSPATH", paths.toString());
+    }
+  }
   protected Process startJob(Context context, String user, String overrideClasspath)
     throws IOException, InterruptedException {
     Configuration conf = context.getConfiguration();
@@ -135,6 +161,7 @@ public class LaunchMapper extends Mapper
     Map<String, String> env = TempletonUtils.hadoopUserEnv(user, overrideClasspath);
     handlePigEnvVars(conf, env);
     handleSqoop(conf, env);
+    handleHadoopClasspathExtras(conf, env);    
     List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
     handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER, "mapreduce.job.credentials.binary");
     handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER_TEZ, "tez.credentials.path");

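The handleHadoopClasspathExtras method added above joins the localized entries with File.pathSeparator and expands directories with a trailing separator-plus-asterisk. Here is a self-contained sketch of the same string-building rule, using plain java.io.File instead of Hadoop's local FileSystem; the sample entries are hypothetical.

import java.io.File;

// Sketch of the HADOOP_CLASSPATH assembly in handleHadoopClasspathExtras().
public class ClasspathBuilderSketch {
  static String buildClasspath(String[] entries, String existing) {
    if (entries == null || entries.length == 0) {
      return existing == null ? "" : existing;
    }
    StringBuilder paths = new StringBuilder();
    for (String e : entries) {
      paths.append(e);
      if (new File(e).isDirectory()) {
        paths.append(File.separator).append('*'); // pick up every jar in the dir
      }
      paths.append(File.pathSeparator);
    }
    paths.setLength(paths.length() - 1); // drop the trailing separator
    return (existing == null || existing.isEmpty())
        ? paths.toString()
        : existing + File.pathSeparator + paths;
  }

  public static void main(String[] args) {
    System.out.println(buildClasspath(
        new String[] {"udfs.jar", System.getProperty("java.io.tmpdir")},
        System.getenv("HADOOP_CLASSPATH")));
  }
}
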
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
Tue Mar 31 20:14:02 2015
@@ -18,9 +18,7 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
-import java.io.File;
 import java.io.IOException;
-import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 
@@ -28,13 +26,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
@@ -47,7 +41,6 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Tool;
 import org.apache.hive.hcatalog.templeton.AppConfig;
-import org.apache.hive.hcatalog.templeton.Main;
 import org.apache.hive.hcatalog.templeton.SecureProxySupport;
 import org.apache.hive.hcatalog.templeton.UgiFactory;
 import org.apache.thrift.TException;
@@ -114,6 +107,15 @@ public class TempletonControllerJob exte
     if(memoryMb != null && memoryMb.length() != 0) {
       conf.set(AppConfig.HADOOP_MAP_MEMORY_MB, memoryMb);
     }
+    String amMemoryMB = appConf.amMemoryMb();
+    if (amMemoryMB != null && !amMemoryMB.isEmpty()) {
+      conf.set(AppConfig.HADOOP_MR_AM_MEMORY_MB, amMemoryMB);
+    }
+    String amJavaOpts = appConf.controllerAMChildOpts();
+    if (amJavaOpts != null && !amJavaOpts.isEmpty()) {
+      conf.set(AppConfig.HADOOP_MR_AM_JAVA_OPTS, amJavaOpts);
+    }
+
     String user = UserGroupInformation.getCurrentUser().getShortUserName();
     conf.set("user.name", user);
     Job job = new Job(conf);

Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
(original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
Tue Mar 31 20:14:02 2015
@@ -76,32 +76,31 @@ final class TrivialExecService {
     }
   }
   /**
-   * Print files and directories in current directory. Will list files in the sub-directory (only 1 level deep)
-   * time honored tradition in WebHCat of borrowing from Oozie
+   * Print files and directories in current {@code dir}.
    */
-  private static void printContentsOfDir(String dir) {
+  private static StringBuilder printContentsOfDir(String dir, int depth, StringBuilder sb) {
+    StringBuilder indent = new StringBuilder();
+    for(int i = 0; i < depth; i++) {
+      indent.append("--");
+    }
     File folder = new File(dir);
-    StringBuilder sb = new StringBuilder("Files in '").append(dir).append("' dir:").append(folder.getAbsolutePath()).append('\n');
+    sb.append(indent).append("Files in '").append(dir).append("' dir:").append(folder.getAbsolutePath()).append('\n');
 
     File[] listOfFiles = folder.listFiles();
+    if(listOfFiles == null) {
+      return sb;
+    }
     for (File fileName : listOfFiles) {
       if (fileName.isFile()) {
-        sb.append("File: ").append(fileName.getName()).append('\n');
+        sb.append(indent).append("File: ").append(fileName.getName()).append('\n');
       }
       else if (fileName.isDirectory()) {
-        sb.append("Dir: ").append(fileName.getName()).append('\n');
-        File subDir = new File(fileName.getName());
-        File[] moreFiles = subDir.listFiles();
-        for (File subFileName : moreFiles) {
-          if (subFileName.isFile()) {
-            sb.append("--File: ").append(subFileName.getName()).append('\n');
-          }
-          else if (subFileName.isDirectory()) {
-            sb.append("--Dir: ").append(subFileName.getName()).append('\n');
-          }
-        }
+        printContentsOfDir(fileName.getName(), depth+1, sb);
       }
     }
-    LOG.info(sb.toString());
+    return sb;
+  }
+  private static void printContentsOfDir(String dir) {
+    LOG.info(printContentsOfDir(dir, 0, new StringBuilder()).toString());    
   }
 }

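The rewritten TrivialExecService helper above replaces the hand-unrolled one-level listing with true recursion. A standalone version of the same pattern follows; it passes the full File handle down the recursion (the patch passes getName(), which only resolves sub-directories relative to the working directory), so treat it as a sketch rather than the committed code.

import java.io.File;

// Standalone sketch of the recursive directory dump in printContentsOfDir().
public class DirDumpSketch {
  static StringBuilder dump(File dir, int depth, StringBuilder sb) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("--");
    }
    sb.append(indent).append("Files in '").append(dir.getPath()).append("':\n");
    File[] entries = dir.listFiles();
    if (entries == null) { // not a directory, or an I/O error
      return sb;
    }
    for (File f : entries) {
      if (f.isFile()) {
        sb.append(indent).append("File: ").append(f.getName()).append('\n');
      } else if (f.isDirectory()) {
        dump(f, depth + 1, sb); // recurse with the full handle, not just the name
      }
    }
    return sb;
  }

  public static void main(String[] args) {
    System.out.print(dump(new File(args.length > 0 ? args[0] : "."), 0, new StringBuilder()));
  }
}
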
Modified: hive/branches/cbo/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
(original)
+++ hive/branches/cbo/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
Tue Mar 31 20:14:02 2015
@@ -41,26 +41,48 @@ public class TestHiveAuthFactory {
   }
 
   /**
-   * Verify that delegation token manager is started with no exception
+   * Verify that delegation token manager is started with no exception for MemoryTokenStore
    * @throws Exception
    */
   @Test
-  public void testStartTokenManager() throws Exception {
+  public void testStartTokenManagerForMemoryTokenStore() throws Exception {
     hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
     String principalName = miniHiveKdc.getFullHiveServicePrincipal();
     System.out.println("Principal: " + principalName);
-    
+
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
+    String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
+    System.out.println("keyTabFile: " + keyTabFile);
+    Assert.assertNotNull(keyTabFile);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
+
+    HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
+    Assert.assertNotNull(authFactory);
+    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
+        authFactory.getAuthTransFactory().getClass().getName());
+  }
+
+  /**
+   * Verify that delegation token manager is started with no exception for DBTokenStore
+   * @throws Exception
+   */
+  @Test
+  public void testStartTokenManagerForDBTokenStore() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
+    String principalName = miniHiveKdc.getFullHiveServicePrincipal();
+    System.out.println("Principal: " + principalName);
+
     hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
     String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
     System.out.println("keyTabFile: " + keyTabFile);
     Assert.assertNotNull(keyTabFile);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
 
-    System.out.println("rawStoreClassName =" +  hiveConf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL));
+    hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore");
 
     HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
     Assert.assertNotNull(authFactory);
-    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",

+    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
         authFactory.getAuthTransFactory().getClass().getName());
   }
 }

Modified: hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
(original)
+++ hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
Tue Mar 31 20:14:02 2015
@@ -10,7 +10,6 @@ import org.apache.hadoop.hive.common.Val
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreThread;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CompactionRequest;
 import org.apache.hadoop.hive.metastore.api.CompactionType;
@@ -55,6 +54,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
@@ -88,6 +89,7 @@ public class TestCompactor {
     hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
     hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR);
     hiveConf.setVar(HiveConf.ConfVars.HIVEINPUTFORMAT, HiveInputFormat.class.getName());
+    hiveConf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
     //"org.apache.hadoop.hive.ql.io.HiveInputFormat"
 
     TxnDbUtil.setConfValues(hiveConf);
@@ -281,6 +283,124 @@ public class TestCompactor {
   }
 
   @Test
+  public void dynamicPartitioningInsert() throws Exception {
+    String tblName = "dpct";
+    List<String> colNames = Arrays.asList("a", "b");
+    executeStatementOnDriver("drop table if exists " + tblName, driver);
+    executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) " +
+      " PARTITIONED BY(ds string)" +
+      " CLUSTERED BY(a) INTO 2 BUCKETS" + //currently ACID requires table to be bucketed
+      " STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
+    executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred',
" +
+        "'today'), (2, 'wilma', 'yesterday')", driver);
+
+    Initiator initiator = new Initiator();
+    initiator.setThreadId((int)initiator.getId());
+    conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD, 0);
+    initiator.setHiveConf(conf);
+    AtomicBoolean stop = new AtomicBoolean();
+    stop.set(true);
+    initiator.init(stop, new AtomicBoolean());
+    initiator.run();
+
+    CompactionTxnHandler txnHandler = new CompactionTxnHandler(conf);
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(2, compacts.size());
+    SortedSet<String> partNames = new TreeSet<String>();
+    for (int i = 0; i < compacts.size(); i++) {
+      Assert.assertEquals("default", compacts.get(i).getDbname());
+      Assert.assertEquals(tblName, compacts.get(i).getTablename());
+      Assert.assertEquals("initiated", compacts.get(i).getState());
+      partNames.add(compacts.get(i).getPartitionname());
+    }
+    List<String> names = new ArrayList<String>(partNames);
+    Assert.assertEquals("ds=today", names.get(0));
+    Assert.assertEquals("ds=yesterday", names.get(1));
+  }
+
+  @Test
+  public void dynamicPartitioningUpdate() throws Exception {
+    String tblName = "udpct";
+    List<String> colNames = Arrays.asList("a", "b");
+    executeStatementOnDriver("drop table if exists " + tblName, driver);
+    executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) " +
+      " PARTITIONED BY(ds string)" +
+      " CLUSTERED BY(a) INTO 2 BUCKETS" + //currently ACID requires table to be bucketed
+      " STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
+    executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred',
" +
+        "'today'), (2, 'wilma', 'yesterday')", driver);
+
+    executeStatementOnDriver("update " + tblName + " set b = 'barney'", driver);
+
+    Initiator initiator = new Initiator();
+    initiator.setThreadId((int)initiator.getId());
+    // Set to 1 so insert doesn't set it off but update does
+    conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD, 1);
+    initiator.setHiveConf(conf);
+    AtomicBoolean stop = new AtomicBoolean();
+    stop.set(true);
+    initiator.init(stop, new AtomicBoolean());
+    initiator.run();
+
+    CompactionTxnHandler txnHandler = new CompactionTxnHandler(conf);
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(2, compacts.size());
+    SortedSet<String> partNames = new TreeSet<String>();
+    for (int i = 0; i < compacts.size(); i++) {
+      Assert.assertEquals("default", compacts.get(i).getDbname());
+      Assert.assertEquals(tblName, compacts.get(i).getTablename());
+      Assert.assertEquals("initiated", compacts.get(i).getState());
+      partNames.add(compacts.get(i).getPartitionname());
+    }
+    List<String> names = new ArrayList<String>(partNames);
+    Assert.assertEquals("ds=today", names.get(0));
+    Assert.assertEquals("ds=yesterday", names.get(1));
+  }
+
+  @Test
+  public void dynamicPartitioningDelete() throws Exception {
+    String tblName = "ddpct";
+    List<String> colNames = Arrays.asList("a", "b");
+    executeStatementOnDriver("drop table if exists " + tblName, driver);
+    executeStatementOnDriver("CREATE TABLE " + tblName + "(a INT, b STRING) " +
+      " PARTITIONED BY(ds string)" +
+      " CLUSTERED BY(a) INTO 2 BUCKETS" + //currently ACID requires table to be bucketed
+      " STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
+    executeStatementOnDriver("insert into " + tblName + " partition (ds) values (1, 'fred',
" +
+        "'today'), (2, 'wilma', 'yesterday')", driver);
+
+    executeStatementOnDriver("update " + tblName + " set a = 3", driver);
+
+    executeStatementOnDriver("delete from " + tblName + " where b = 'fred'", driver);
+
+    Initiator initiator = new Initiator();
+    initiator.setThreadId((int)initiator.getId());
+    // Set to 2 so insert and update don't set it off but delete does
+    conf.setIntVar(HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_NUM_THRESHOLD, 2);
+    initiator.setHiveConf(conf);
+    AtomicBoolean stop = new AtomicBoolean();
+    stop.set(true);
+    initiator.init(stop, new AtomicBoolean());
+    initiator.run();
+
+    CompactionTxnHandler txnHandler = new CompactionTxnHandler(conf);
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(1, compacts.size());
+    SortedSet<String> partNames = new TreeSet<String>();
+    for (int i = 0; i < compacts.size(); i++) {
+      Assert.assertEquals("default", compacts.get(i).getDbname());
+      Assert.assertEquals(tblName, compacts.get(i).getTablename());
+      Assert.assertEquals("initiated", compacts.get(i).getState());
+      partNames.add(compacts.get(i).getPartitionname());
+    }
+    List<String> names = new ArrayList<String>(partNames);
+    Assert.assertEquals("ds=today", names.get(0));
+  }
+
+  @Test
   public void minorCompactWhileStreaming() throws Exception {
     String dbName = "default";
     String tblName = "cws";

Modified: hive/branches/cbo/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/pom.xml?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/itests/pom.xml (original)
+++ hive/branches/cbo/itests/pom.xml Tue Mar 31 20:14:02 2015
@@ -93,6 +93,9 @@
                   mkdir -p $DOWNLOAD_DIR
                   download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz"
"spark"
                   cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/
+                  sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java
+                  javac -cp  ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp
+                  jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class
                 </echo>
               </target>
             </configuration>

Modified: hive/branches/cbo/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/if/hive_metastore.thrift?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/metastore/if/hive_metastore.thrift (original)
+++ hive/branches/cbo/metastore/if/hive_metastore.thrift Tue Mar 31 20:14:02 2015
@@ -651,6 +651,13 @@ struct ShowCompactResponse {
     1: required list<ShowCompactResponseElement> compacts,
 }
 
+struct AddDynamicPartitions {
+    1: required i64 txnid,
+    2: required string dbname,
+    3: required string tablename,
+    4: required list<string> partitionnames,
+}
+
 struct NotificationEventRequest {
     1: required i64 lastEvent,
     2: optional i32 maxEvents,
@@ -1164,6 +1171,7 @@ service ThriftHiveMetastore extends fb30
   HeartbeatTxnRangeResponse heartbeat_txn_range(1:HeartbeatTxnRangeRequest txns)
   void compact(1:CompactionRequest rqst) 
   ShowCompactResponse show_compact(1:ShowCompactRequest rqst)
+  void add_dynamic_partitions(1:AddDynamicPartitions rqst) throws (1:NoSuchTxnException o1, 2:TxnAbortedException o2)
 
   // Notification logging calls
   NotificationEventResponse get_next_notification(1:NotificationEventRequest rqst) 


