hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1613740 [29/29] - in /hive/branches/spark: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/resources/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java...
Date: Sat, 26 Jul 2014 23:46:00 GMT
Modified: hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
Sat Jul 26 23:45:46 2014
@@ -21,12 +21,24 @@ package org.apache.hive.ptest.execution.
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.PatternLayout;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Splitter;
@@ -97,6 +109,8 @@ public class TestParser {
     return result;
   }
   private List<QFileTestBatch> parseQFileTests() {
+    Map<String, Properties> properties = parseQTestProperties();
+
     Splitter splitter = Splitter.on(" ").trimResults().omitEmptyStrings();
     List<QFileTestBatch> result = Lists.newArrayList();
     for(String alias : context.getString("qFileTests", "").split(" ")) {
@@ -111,22 +125,25 @@ public class TestParser {
      for(String excludedTestGroup : splitter.split(testContext.getString("exclude", ""))) {
        excludedTests.addAll(Arrays.asList(testContext.
            getString(Joiner.on(".").join("groups", excludedTestGroup), "").trim().split(" ")));
+        expandTestProperties(excludedTests, properties);
       }
       Set<String> isolatedTests = Sets.newHashSet();
      for(String ioslatedTestGroup : splitter.split(testContext.getString("isolate", ""))) {
        isolatedTests.addAll(Arrays.asList(testContext.
            getString(Joiner.on(".").join("groups", ioslatedTestGroup), "").trim().split(" ")));
+        expandTestProperties(isolatedTests, properties);
       }
 
       Set<String> includedTests = Sets.newHashSet();
      for(String includedTestGroup : splitter.split(testContext.getString("include", ""))) {
        includedTests.addAll(Arrays.asList(testContext.
            getString(Joiner.on(".").join("groups", includedTestGroup), "").trim().split(" ")));
+        expandTestProperties(includedTests, properties);
       }
-      if(!includedTests.isEmpty() && !excludedTests.isEmpty()) {
-        throw new IllegalArgumentException(String.format("Included and excluded mutally exclusive." +
-            " Included = %s, excluded = %s", includedTests.toString(), excludedTests.toString()));
-      }
+
+      //excluded overrides included
+      includedTests.removeAll(excludedTests);
+
       result.addAll(createQFileTestBatches(
           driver,
           checkNotNull(testContext.getString("queryFilesProperty"), "queryFilesProperty").trim(),
@@ -175,6 +192,72 @@ public class TestParser {
     return testBatches;
   }
 
+  /**
+   * @return properties loaded from files specified in qFileTests.propertyFiles.${fileName}=${filePath}
+   */
+  private Map<String, Properties> parseQTestProperties() {
+    Map<String, String> propFiles = context.getSubProperties("qFileTests.propertyFiles.");
+    Map<String, Properties> propertyMap = new HashMap<String, Properties>();
+    for (String propFile : propFiles.keySet()) {
+      Properties properties = new Properties();
+      String path = sourceDirectory + File.separator + propFiles.get(propFile);
+      FileInputStream fis = null;
+      try {
+        fis = new FileInputStream(path);
+        properties.load(fis);
+      } catch (IOException e) {
+        logger.warn("Error processing Qtest property file", e);
+        throw new IllegalArgumentException("Error processing Qtest property file: " + path);
+      } finally {
+        try {
+          if (fis != null) {
+            fis.close();
+          }
+        } catch (IOException e) { //ignore
+        }
+      }
+      propertyMap.put(propFile, properties);
+      logger.info("Loaded Qtest property file: " + path);
+    }
+    return propertyMap;
+  }
+
+  /**
+   * If any of given tests are of the form: ${fileName}.${property} (test list within a property file),
+   * then expand them.  Then remove those markers from the list of tests.
+   */
+  private void expandTestProperties(Set<String> tests, Map<String, Properties> propMap) {
+    Set<String> toRemove = new HashSet<String>();
+    Set<String> toAdd = new HashSet<String>();
+
+    String pattern = "([^\\.]*)\\.\\$\\{([^}]*)}";
+    Pattern r = Pattern.compile(pattern);
+    for (String test : tests) {
+      Matcher m = r.matcher(test);
+      if (m.find()) {
+        toRemove.add(test);
+        logger.info("Expanding qfile property: " + test);
+        String propName = m.group(1);
+        String propValue = m.group(2);
+        Properties props = propMap.get(propName);
+        if (props == null) {
+          logger.warn("No properties found for : " + propName);
+          throw new IllegalArgumentException("No properties found for : " + propName);
+        }
+        String result = (String) props.get(propValue);
+        if (result == null || result.isEmpty()) {
+          logger.warn("No properties found in file: " + propName + " for property: " + propValue);
+          throw new IllegalArgumentException("No propertifies found in file: " + propName + " for property: " + propValue);
+        }
+        Iterable<String> splits = Splitter.on(',').trimResults().omitEmptyStrings().split(result);
+        for (String split : splits) {
+          toAdd.add(split);
+        }
+      }
+    }
+    tests.removeAll(toRemove);
+    tests.addAll(toAdd);
+  }
 
   public Supplier<List<TestBatch>> parse() {
     return new Supplier<List<TestBatch>>() {
@@ -184,4 +267,25 @@ public class TestParser {
       }
     };
   }
+
+  /**
+   * Manually test this against any property file.
+   * @param args
+   * @throws Exception
+   */
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      throw new IllegalArgumentException("Enter the property file location");
+    }
+    Logger log = LoggerFactory
+        .getLogger(TestParser.class);
+    File workingDir = new File("../..");
+    File testConfigurationFile = new File(args[0]);
+    TestConfiguration conf = TestConfiguration.fromFile(testConfigurationFile, log);
+    TestParser testParser = new TestParser(conf.getContext(), "test", workingDir, log);
+    List<TestBatch> testBatches = testParser.parse().get();
+    for (TestBatch testBatch : testBatches) {
+      System.out.println(testBatch.getTestArguments());
+    }
+  }
 }
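
The net effect of the TestParser changes above: a test group entry may now reference a property file instead of listing qfiles inline. A file registered as qFileTests.propertyFiles.<alias>=<path> is loaded by parseQTestProperties(), and any group member of the form <alias>.${<key>} is expanded by expandTestProperties() into the comma-separated qfile list stored under <key>; excluded tests now simply override included ones rather than being rejected. A minimal sketch of the marker matching, using a hypothetical alias and key (not taken from the commit):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ExpandMarkerSketch {
      public static void main(String[] args) {
        // Same pattern as expandTestProperties: <fileAlias>.${<propertyKey>}
        Pattern marker = Pattern.compile("([^\\.]*)\\.\\$\\{([^}]*)}");
        Matcher m = marker.matcher("prop.${normal.one.group}");  // hypothetical group entry
        if (m.find()) {
          System.out.println("file alias = " + m.group(1));  // prop
          System.out.println("property   = " + m.group(2));  // normal.one.group
          // TestParser would replace the marker with the comma-separated qfile list
          // found under that property in the loaded file, e.g. "normal.q,normal2.q".
        }
      }
    }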

Modified: hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
Sat Jul 26 23:45:46 2014
@@ -34,6 +34,8 @@ import org.jclouds.compute.domain.NodeMe
 import org.jclouds.compute.domain.NodeMetadata.Status;
 import org.jclouds.compute.domain.Template;
 import org.jclouds.logging.log4j.config.Log4JLoggingModule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
@@ -41,6 +43,8 @@ import com.google.common.collect.Immutab
 import com.google.common.collect.Sets;
 
 public class CloudComputeService {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(CloudComputeService.class);
   private final ComputeServiceContext mComputeServiceContext;
   private final ComputeService mComputeService;
   private final String mInstanceType;
@@ -49,9 +53,12 @@ public class CloudComputeService {
   private final String mImageId;
   private final String mkeyPair;
   private final String mSecurityGroup;
-  private final float mMaxBid;
+  /**
+   * JClouds requests on-demand instances when null
+   */
+  private final Float mMaxBid;
  public CloudComputeService(String apiKey, String accessKey, String instanceType, String groupName,
-      String imageId, String keyPair, String securityGroup, float maxBid) {
+      String imageId, String keyPair, String securityGroup, Float maxBid) {
     mInstanceType = instanceType;
     mGroupName = groupName;
     mImageId = imageId;
@@ -90,15 +97,20 @@ public class CloudComputeService {
         return nodeMetadata.getStatus() == Status.RUNNING && isPTestHost(nodeMetadata);
       }
       private boolean isPTestHost(NodeMetadata node) {
+        String result = "false non-ptest host";
         if(groupName.equalsIgnoreCase(node.getGroup())) {
+          result = "true due to group " + groupName;
           return true;
         }
         if(Strings.nullToEmpty(node.getName()).startsWith(groupName)) {
+          result = "true due to name " + groupName;
           return true;
         }
         if(node.getTags().contains(groupTag)) {
+          result = "true due to tag " + groupName;
           return true;
         }
+        LOG.debug("Found node: " + node + ", Result: " + result);
         return false;
       }
     };

Modified: hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
Sat Jul 26 23:45:46 2014
@@ -24,6 +24,7 @@ import java.io.RandomAccessFile;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashSet;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
@@ -58,6 +59,7 @@ public class CloudExecutionContextProvid
   public static final String API_KEY = "apiKey";
   public static final String ACCESS_KEY = "accessKey";
   public static final String NUM_HOSTS = "numHosts";
+  public static final String MAX_HOSTS_PER_CREATE_REQUEST = "maxHostsPerCreateRequest";
   public static final String GROUP_NAME = "groupName";
   public static final String IMAGE_ID = "imageId";
   public static final String KEY_PAIR = "keyPair";
@@ -74,9 +76,11 @@ public class CloudExecutionContextProvid
   private final String[] mSlaveLocalDirs;
   private final int mNumThreads;
   private final int mNumHosts;
+  private final int mMaxHostsPerCreateRequest;
   private final long mRetrySleepInterval;
   private final CloudComputeService mCloudComputeService;
   private final Map<String, Long> mTerminatedHosts;
+  private final Map<String, Long> mLiveHosts;
   private final ExecutorService mTerminationExecutor;
   private final File mWorkingDir;
   private final SSHCommandExecutor mSSHCommandExecutor;
@@ -85,8 +89,9 @@ public class CloudExecutionContextProvid
   CloudExecutionContextProvider(String dataDir,
       int numHosts, CloudComputeService cloudComputeService, SSHCommandExecutor sshCommandExecutor,
      String workingDirectory, String privateKey, String user, String[] slaveLocalDirs, int numThreads,
-      long retrySleepInterval) throws IOException {
+      long retrySleepInterval, int maxHostsPerCreateRequest) throws IOException {
     mNumHosts = numHosts;
+    mMaxHostsPerCreateRequest = maxHostsPerCreateRequest;
     mCloudComputeService = cloudComputeService;
     mPrivateKey = privateKey;
     mUser = user;
@@ -95,6 +100,7 @@ public class CloudExecutionContextProvid
     mRetrySleepInterval = retrySleepInterval;
     mSSHCommandExecutor = sshCommandExecutor;
     mWorkingDir = Dirs.create(new File(workingDirectory, "working"));
+    mLiveHosts = Collections.synchronizedMap(new HashMap<String, Long>());
     mTerminatedHosts = Collections
         .synchronizedMap(new LinkedHashMap<String, Long>() {
           private static final long serialVersionUID = 1L;
@@ -110,6 +116,7 @@ public class CloudExecutionContextProvid
   }
 
   private void initialize() throws IOException {
+    LOG.info("CloudExecutionContextProvider maxHostsPerCreateRequest = " + mMaxHostsPerCreateRequest);
     Set<String> hosts = Sets.newHashSet();
     String host = null;
     mHostLog.seek(0); // should already be true
@@ -164,7 +171,7 @@ public class CloudExecutionContextProvid
       terminate(hostsToTerminate, true);
       Set<NodeMetadata> nodes = createNodes(hostsToTerminate.size());
       for (NodeMetadata node : nodes) {
-        executionContext.addHost(new Host(node.getHostname(), mUser, mSlaveLocalDirs,
+        executionContext.addHost(new Host(publicIp(node), mUser, mSlaveLocalDirs,
             mNumThreads));
       }
     }
@@ -179,8 +186,8 @@ public class CloudExecutionContextProvid
       Set<NodeMetadata> nodes = createNodes(mNumHosts);
       Set<Host> hosts = Sets.newHashSet();
       for (NodeMetadata node : nodes) {
-        hosts.add(new Host(node.getHostname(), mUser, mSlaveLocalDirs,
-            mNumThreads));
+        hosts.add(new Host(publicIp(node), mUser, mSlaveLocalDirs,
+          mNumThreads));
       }
       return new ExecutionContext(this, hosts, mWorkingDir.getAbsolutePath(),
           mPrivateKey);
@@ -204,7 +211,7 @@ public class CloudExecutionContextProvid
       boolean error = false;
       LOG.info("Attempting to create " + numRequired + " nodes");
       try {
-        result.addAll(mCloudComputeService.createNodes(Math.min(2, numRequired)));
+        result.addAll(mCloudComputeService.createNodes(Math.min(mMaxHostsPerCreateRequest, numRequired)));
       } catch (RunNodesException e) {
         error = true;
         LOG.warn("Error creating nodes", e);
@@ -212,6 +219,9 @@ public class CloudExecutionContextProvid
         result.addAll(e.getSuccessfulNodes());
       }
       result = verifyHosts(result);
+      for (NodeMetadata node : result) {
+        mLiveHosts.put(publicIpOrHostname(node), System.currentTimeMillis());
+      }
       LOG.info("Successfully created " + result.size() + " nodes");
       numRequired = numHosts - result.size();
       if (numRequired > 0) {
@@ -247,6 +257,23 @@ public class CloudExecutionContextProvid
     }
   }
 
+
+  private static String publicIpOrHostname(NodeMetadata node) {
+    Set<String> publicIps = node.getPublicAddresses();
+    if (publicIps.size() == 1) {
+      return Iterables.getOnlyElement(publicIps);
+    }
+    return node.getHostname();
+  }
+
+  private static String publicIp(NodeMetadata node) {
+    Set<String> publicIps = node.getPublicAddresses();
+    if (publicIps.size() == 1) {
+      return Iterables.getOnlyElement(publicIps);
+    }
+    throw new IllegalStateException("Node does not have exactly one public ip: " + node);
+  }
+
   private Set<NodeMetadata> verifyHosts(Set<? extends NodeMetadata> hosts)
       throws CreateHostsFailedException {
     final Set<NodeMetadata> result = Collections.synchronizedSet(new HashSet<NodeMetadata>());
@@ -258,7 +285,8 @@ public class CloudExecutionContextProvid
           executorService.submit(new Runnable() {
             @Override
             public void run() {
-              SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, node.getHostname(), 0, "pkill -f java");
+              String ip = publicIpOrHostname(node);
+              SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, ip, 0, "pkill -f java");
               mSSHCommandExecutor.execute(command);
               if(command.getExitCode() == Constants.EXIT_CODE_UNKNOWN ||
                   command.getException() != null) {
@@ -293,10 +321,13 @@ public class CloudExecutionContextProvid
       terminatedHosts.putAll(mTerminatedHosts);
     }
     for (NodeMetadata node : getRunningNodes()) {
-      if (terminatedHosts.containsKey(node.getHostname())) {
+      String ip = publicIpOrHostname(node);
+      if (terminatedHosts.containsKey(ip)) {
         terminateInternal(node);
         LOG.warn("Found zombie node: " + node + " previously terminated at "
-            + new Date(terminatedHosts.get(node.getHostname())));
+            + new Date(terminatedHosts.get(ip)));
+      } else if(!mLiveHosts.containsKey(ip)) {
+        LOG.warn("Found zombie node: " + node + " previously unknown to ptest");
       }
     }
   }
@@ -318,6 +349,7 @@ public class CloudExecutionContextProvid
 
   private void terminateInternal(final NodeMetadata node) {
     LOG.info("Submitting termination for " + node);
+    mLiveHosts.remove(publicIpOrHostname(node));
     mTerminationExecutor.submit(new Runnable() {
       @Override
       public void run() {
@@ -328,9 +360,10 @@ public class CloudExecutionContextProvid
          Thread.currentThread().interrupt();
         }
         try {
-          LOG.info("Terminating " + node.getHostname());
-          if (!mTerminatedHosts.containsKey(node.getHostname())) {
-            mTerminatedHosts.put(node.getHostname(), System.currentTimeMillis());
+          String ip = publicIpOrHostname(node);
+          LOG.info("Terminating " + ip);
+          if (!mTerminatedHosts.containsKey(ip)) {
+            mTerminatedHosts.put(ip, System.currentTimeMillis());
           }
           mCloudComputeService.destroyNode(node.getId());
         } catch (Exception e) {
@@ -343,8 +376,9 @@ public class CloudExecutionContextProvid
   private void persistHostnamesToLog(Set<? extends NodeMetadata> nodes) {
     for (NodeMetadata node : nodes) {
       try {
-        if(!Strings.nullToEmpty(node.getHostname()).trim().isEmpty()) {
-          mHostLog.writeBytes(node.getHostname() + "\n");
+        String ip = publicIpOrHostname(node);
+        if(!Strings.nullToEmpty(ip).trim().isEmpty()) {
+          mHostLog.writeBytes(ip + "\n");
         }
       } catch (IOException e) {
         Throwables.propagate(e);
@@ -364,7 +398,8 @@ public class CloudExecutionContextProvid
     LOG.info("Requesting termination of " + hosts);
     Set<NodeMetadata> nodesToTerminate = Sets.newHashSet();
     for (NodeMetadata node : getRunningNodes()) {
-      if (hosts.contains(node.getHostname())) {
+      String ip = publicIpOrHostname(node);
+      if (hosts.contains(ip)) {
         nodesToTerminate.add(node);
       }
     }
@@ -391,6 +426,7 @@ public class CloudExecutionContextProvid
         API_KEY + " is required");
     String accessKey = Preconditions.checkNotNull(
         context.getString(ACCESS_KEY), ACCESS_KEY + " is required");
+    int maxHostsPerCreateRequest = context.getInteger(MAX_HOSTS_PER_CREATE_REQUEST, 2);
     Integer numHosts = context.getInteger(NUM_HOSTS, 8);
     Preconditions.checkArgument(numHosts > 0, NUM_HOSTS
         + " must be greater than zero");
@@ -401,10 +437,9 @@ public class CloudExecutionContextProvid
         KEY_PAIR + " is required");
     String securityGroup = Preconditions.checkNotNull(
         context.getString(SECURITY_GROUP), SECURITY_GROUP + " is required");
-    Float maxBid = Preconditions.checkNotNull(context.getFloat(MAX_BID),
-        MAX_BID + " is required");
-    Preconditions.checkArgument(maxBid > 0, MAX_BID
-        + " must be greater than zero");
+    Float maxBid = context.getFloat(MAX_BID);
+    Preconditions.checkArgument(maxBid == null || maxBid > 0, MAX_BID
+        + " must be null or greater than zero");
     String privateKey = Preconditions.checkNotNull(
         context.getString(PRIVATE_KEY), PRIVATE_KEY + " is required");
     String user = context.getString(USERNAME, "hiveptest");
@@ -417,7 +452,7 @@ public class CloudExecutionContextProvid
         instanceType, groupName, imageId, keyPair, securityGroup, maxBid);
     CloudExecutionContextProvider service = new CloudExecutionContextProvider(
         dataDir, numHosts, cloudComputeService, new SSHCommandExecutor(LOG), workingDirectory,
-        privateKey, user, localDirs, numThreads, 60);
+        privateKey, user, localDirs, numThreads, 60, maxHostsPerCreateRequest);
     return service;
   }
 }
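
Two behavioural changes in CloudExecutionContextProvider: node creation is now batched by the new maxHostsPerCreateRequest setting (default 2) instead of a hard-coded Math.min(2, numRequired), and hosts are tracked by their public IP (falling back to the hostname when a node does not expose exactly one public address), with MAX_BID now optional so that jclouds requests on-demand instances when it is unset. A rough, self-contained sketch of the batching behaviour only (the numbers are hypothetical; the real createNodes() also verifies hosts and retries failures):

    public class CreateNodesBatchingSketch {
      public static void main(String[] args) {
        int numRequired = 5;               // hypothetical number of hosts still needed
        int maxHostsPerCreateRequest = 2;  // new setting, default 2
        while (numRequired > 0) {
          int batch = Math.min(maxHostsPerCreateRequest, numRequired);
          System.out.println("request " + batch + " node(s) from the cloud provider");
          numRequired -= batch;            // real code subtracts only verified hosts
        }
      }
    }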

Modified: hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
Sat Jul 26 23:45:46 2014
@@ -30,20 +30,20 @@ import org.slf4j.Logger;
 
 
 public class RSyncCommandExecutor {
-
   private final Logger mLogger;
+  private final int mMaxRsyncThreads;
   private final LocalCommandFactory mLocalCommandFactory;
   private final Semaphore mSemaphore;
   private volatile boolean mShutdown;
 
-  public RSyncCommandExecutor(Logger logger, LocalCommandFactory localCommandFactory) {
+  public RSyncCommandExecutor(Logger logger, int maxRsyncThreads, LocalCommandFactory localCommandFactory) {
     mLogger = logger;
+    mMaxRsyncThreads = Math.min(Runtime.getRuntime().availableProcessors() * 5, maxRsyncThreads);
     mLocalCommandFactory = localCommandFactory;
-    mSemaphore = new Semaphore(Math.min(Runtime.getRuntime().availableProcessors() * 5, 10));
+    mSemaphore = new Semaphore(mMaxRsyncThreads);
     mShutdown = false;
-  }
-  public RSyncCommandExecutor(Logger logger) {
-    this(logger, new LocalCommandFactory(logger));
+    mLogger.info("RSyncCommandExecutor has " + mMaxRsyncThreads + " threads on " + Runtime.getRuntime()
+      .availableProcessors() + " cpus");
   }
 
   /**
@@ -105,4 +105,4 @@ public class RSyncCommandExecutor {
   public void shutdownNow() {
     this.mShutdown = true;
   }
-}
\ No newline at end of file
+}
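
RSyncCommandExecutor now requires the rsync thread cap explicitly (the single-argument convenience constructor is gone), and the cap is bounded by 5x the available processors. A hedged usage sketch; the value 10 mirrors the previously hard-coded semaphore limit, and the LocalCommandFactory import path is assumed from the ptest2 layout:

    import org.apache.hive.ptest.execution.LocalCommandFactory;
    import org.apache.hive.ptest.execution.ssh.RSyncCommandExecutor;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class RSyncExecutorUsageSketch {
      public static void main(String[] args) {
        Logger logger = LoggerFactory.getLogger(RSyncExecutorUsageSketch.class);
        // maxRsyncThreads = 10 matches the old fixed Semaphore size; the constructor
        // further caps it at 5 * Runtime.getRuntime().availableProcessors().
        RSyncCommandExecutor executor =
            new RSyncCommandExecutor(logger, 10, new LocalCommandFactory(logger));
        executor.shutdownNow();
      }
    }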

Modified: hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java
Sat Jul 26 23:45:46 2014
@@ -34,7 +34,7 @@ public class MockRSyncCommandExecutor ex
   private final List<String> mCommands;
   private final Map<String, Queue<Integer>> mFailures;
   public MockRSyncCommandExecutor(Logger logger) {
-    super(logger);
+    super(logger, 0, null);
     mCommands = Lists.newArrayList();
     mFailures = Maps.newHashMap();
   }

Modified: hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
Sat Jul 26 23:45:46 2014
@@ -19,7 +19,10 @@
 package org.apache.hive.ptest.execution.conf;
 
 import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.util.List;
+import java.util.Properties;
 
 import junit.framework.Assert;
 
@@ -46,6 +49,7 @@ public class TestTestParser {
   private File unitTestDir1;
   private File unitTestDir2;
   private File qFileTestDir;
+  private File propertyDir;
 
   @Before
   public void setup() throws Exception {
@@ -56,6 +60,7 @@ public class TestTestParser {
         "test", "classes")));
    unitTestDir2 = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "build", "2", "units", "test", "classes")));
     qFileTestDir = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "qfiles")));
+    propertyDir = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "props")));
     Assert.assertTrue((new File(unitTestDir1, "TestA.class")).createNewFile());
     Assert.assertTrue((new File(unitTestDir2, "TestB.class")).createNewFile());
     Assert.assertTrue((new File(unitTestDir1, "TestC.class")).createNewFile());
@@ -64,10 +69,20 @@ public class TestTestParser {
     Assert.assertTrue((new File(qFileTestDir, ".svn")).mkdirs());
     Assert.assertTrue((new File(qFileTestDir, "dir.q")).mkdirs());
     Assert.assertTrue((new File(qFileTestDir, "normal.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal2.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal3.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal4.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "excluded.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "isolated.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "included.q")).createNewFile());
 
+    Properties normalProp = new Properties();
+    normalProp.setProperty("normal.one.group", "normal.q,normal2.q");
+    normalProp.setProperty("normal.two.group", "normal3.q,normal4.q");
+    normalProp.setProperty("excluded.group", "excluded.q");
+    normalProp.setProperty("isolated.group", "isolated.q");
+    normalProp.setProperty("included.group", "included.q");
+    serialize("normal.properties", normalProp);
   }
   @After
   public void teardown() {
@@ -107,4 +122,35 @@ public class TestTestParser {
     List<TestBatch> testBatches = testParser.parse().get();
     Assert.assertEquals(4, testBatches.size());
   }
+  @Test
+  public void testParsePropertyFile() throws Exception {
+    context.put("unitTests.directories", "build/1 build/2");
+    context.put("unitTests.include", "TestA TestB");
+    context.put("unitTests.isolate", "TestB");
+    context.put("qFileTests", "f");
+    context.put("qFileTests.propertyFiles.prop",
+      "props" + File.separator + "normal.properties");
+    context.put("qFileTest.f.driver", DRIVER);
+    context.put("qFileTest.f.directory", "qfiles");
+    context.put("qFileTest.f.include", "included");
+    context.put("qFileTest.f.isolate", "isolated");
+    context.put("qFileTest.f.exclude", "excluded");
+    context.put("qFileTest.f.queryFilesProperty", "qfile");
+    context.put("qFileTest.f.groups.included", "prop.${normal.one.group} prop.${normal.two.group}
prop.${isolated.group}");
+    context.put("qFileTest.f.groups.isolated", "prop.${isolated.group}");
+    context.put("qFileTest.f.groups.excluded", "prop.${excluded.group}");
+    testParser = new TestParser(context, "testcase", workingDirectory, LOG);
+    List<TestBatch> testBatches = testParser.parse().get();
+    Assert.assertEquals(4, testBatches.size());
+  }
+
+  private void serialize(String propFileName, Properties props) throws Exception {
+    File f = new File(propertyDir, propFileName);
+    OutputStream out = new FileOutputStream(f);
+    try {
+      props.store(out, null);
+    } finally {
+      out.close();
+    }
+  }
 }
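
For reference, the normal.properties file that serialize() writes for testParsePropertyFile contains (ignoring the timestamp comment Properties.store emits and key ordering) exactly the group definitions that the prop.${...} markers above resolve to:

    normal.one.group=normal.q,normal2.q
    normal.two.group=normal3.q,normal4.q
    excluded.group=excluded.q
    isolated.group=isolated.q
    included.group=included.q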

Modified: hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java
Sat Jul 26 23:45:46 2014
@@ -80,8 +80,11 @@ public class TestCloudExecutionContextPr
     when(template.getImage()).thenReturn(mock(Image.class));
     when(template.getHardware()).thenReturn(mock(Hardware.class));
     when(node1.getHostname()).thenReturn("node1");
+    when(node1.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.1"));
     when(node2.getHostname()).thenReturn("node2");
+    when(node2.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.2"));
     when(node3.getHostname()).thenReturn("node3");
+    when(node3.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.3"));
     runNodesException = new RunNodesException("", 2, template,
         Collections.singleton(node1), Collections.<String, Exception>emptyMap(),
         Collections.singletonMap(node2, new Exception("For testing")));
@@ -105,12 +108,12 @@ public class TestCloudExecutionContextPr
       }
     });
     CloudExecutionContextProvider provider = new CloudExecutionContextProvider(dataDir, NUM_NODES,
-        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 0);
+        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 0, 1);
     ExecutionContext executionContext = provider.createExecutionContext();
     Set<String> hosts = Sets.newHashSet();
     for(Host host : executionContext.getHosts()) {
       hosts.add(host.getName());
     }
-    Assert.assertEquals(Sets.newHashSet("node1", "node3"), hosts);
+    Assert.assertEquals(Sets.newHashSet("1.1.1.1", "1.1.1.3"), hosts);
   }
 }

Modified: hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java
(original)
+++ hive/branches/spark/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java
Sat Jul 26 23:45:46 2014
@@ -50,7 +50,7 @@ public class TestRSyncCommandExecutor {
   public void testShutdownBeforeWaitFor() throws Exception {
     LocalCommand localCommand = mock(LocalCommand.class);
     localCommandFactory.setInstance(localCommand);
-    RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, localCommandFactory);
+    RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, 1, localCommandFactory);
     Assert.assertFalse(executor.isShutdown());
     executor.shutdownNow();
    RSyncCommand command = new RSyncCommand(executor, "privateKey", "user", "host", 1, "local", "remote", RSyncCommand.Type.FROM_LOCAL);
@@ -66,7 +66,7 @@ public class TestRSyncCommandExecutor {
   public void testShutdownDuringWaitFor() throws Exception {
     LocalCommand localCommand = mock(LocalCommand.class);
     localCommandFactory.setInstance(localCommand);
-    final RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, localCommandFactory);
+    final RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, 1, localCommandFactory);
     Assert.assertFalse(executor.isShutdown());
     when(localCommand.getExitCode()).thenAnswer(new Answer<Integer>() {
       @Override
@@ -84,4 +84,4 @@ public class TestRSyncCommandExecutor {
     }
     verify(localCommand, never()).kill();
   }
-}
\ No newline at end of file
+}


