incubator-bigtop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r..@apache.org
Subject svn commit: r1204715 [3/5] - in /incubator/bigtop/branches/hadoop-0.23: ./ bigtop-deploy/puppet/manifests/ bigtop-deploy/puppet/modules/hadoop-pig/tests/ bigtop-deploy/puppet/modules/hadoop/manifests/ bigtop-deploy/puppet/modules/kerberos/manifests/ bi...
Date Mon, 21 Nov 2011 21:31:12 GMT
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/in
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/in?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/in (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/in Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/in
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/in?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/in (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/in Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/in
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/in?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/in (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/in Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out Mon Nov 21 21:31:06 2011
@@ -2,7 +2,7 @@
 -- contributor license agreements.  See the NOTICE file distributed with
 -- this work for additional information regarding copyright ownership.
 -- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
+-- (the "License") you may not use this file except in compliance with
 -- the License.  You may obtain a copy of the License at
 --
 --     http://www.apache.org/licenses/LICENSE-2.0

Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/pom.xml (from r1204645, incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/pom.xml)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/pom.xml?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/pom.xml&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/pom.xml&r1=1204645&r2=1204715&rev=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hive/pom.xml (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/pom.xml Mon Nov 21 21:31:06 2011
@@ -21,12 +21,13 @@
   <parent>
     <groupId>org.apache.bigtop.itest</groupId>
     <artifactId>bigtop-smokes</artifactId>
-    <version>0.2.0-incubating-SNAPSHOT</version>
+    <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
-
   <groupId>org.apache.bigtop.itest</groupId>
-  <artifactId>hive-smoke</artifactId>
-  <version>0.2.0-incubating-SNAPSHOT</version>
-  <name>hivesmoke</name>
+  <artifactId>mahout-smoke</artifactId>
+  <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
+
+  <name>mahoutsmoke</name>
+
 </project>

Added: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy?rev=1204715&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy Mon Nov 21 21:31:06 2011
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.mahout.smoke;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import org.apache.bigtop.itest.JarContent;
+import org.apache.bigtop.itest.shell.Shell;
+
+/**
+ * Test Mahout examples shipped with the distribution.
+ */
+public class TestMahoutExamples {
+  public static final String HADOOP_HOME =
+    System.getenv("HADOOP_HOME");
+  static {
+    assertNotNull("HADOOP_HOME is not set", HADOOP_HOME);
+  }
+
+  public static final String TEMP_DIR = "/tmp/mahout.${(new Date().getTime())}";
+  public static final String WORK_DIR = TEMP_DIR;
+  private static Shell sh = new Shell("/bin/bash -s");
+  public static String download_dir = System.getProperty("mahout.examples.resources.download.path");
+  static {
+    if (download_dir == null) {
+      sh.exec("pwd");
+      download_dir = sh.out[0];
+    }
+  }
+
+  @BeforeClass
+  public static void setUp() {
+    // download resources
+    sh.exec(
+    "if [ ! -f ${download_dir}/20news-bydate.tar.gz ]; then " +
+      "curl http://people.csail.mit.edu/jrennie/20Newsgroups/20news-bydate.tar.gz -o ${download_dir}/20news-bydate.tar.gz; " +
+    "fi");
+    sh.exec(
+    "if [ ! -f ${download_dir}/reuters21578.tar.gz ]; then " +
+      "curl http://kdd.ics.uci.edu/databases/reuters21578/reuters21578.tar.gz -o ${download_dir}/reuters21578.tar.gz; " +
+    "fi");
+    sh.exec(
+    "if [ ! -f ${download_dir}/synthetic_control.data ]; then " +
+      "curl http://archive.ics.uci.edu/ml/databases/synthetic_control/synthetic_control.data -o ${download_dir}/synthetic_control.data; " +
+    "fi");
+    sh.exec(
+    "if [ ! -f ${download_dir}/ml-1m.zip ]; then " +
+      "curl http://www.grouplens.org/system/files/ml-1m.zip -o ${download_dir}/ml-1m.zip; " +
+    "fi");
+    // uncompress archives
+    // 20news-bydate.tar.gz
+    // reuters21578.tar.gz
+    // ml-1m.zip
+    sh.exec("mkdir ${TEMP_DIR}",
+            "cd ${TEMP_DIR}",
+            "mkdir 20news-bydate",
+            "cd 20news-bydate",
+            "tar xzf ${download_dir}/20news-bydate.tar.gz",
+            "cd ..",
+            "mkdir reuters-sgm",
+            "cd reuters-sgm",
+            "tar xzf ${download_dir}/reuters21578.tar.gz",
+            "cd ..",
+            "mkdir movielens",
+            "cd movielens",
+            "unzip ${download_dir}/ml-1m.zip");
+    assertEquals("Failed to uncompress archives", 0, sh.getRet());
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}");
+    assertEquals("Unable to create work dir in hdfs", 0, sh.getRet());
+    rmr("temp");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("rm -rf ${TEMP_DIR}",
+            "hadoop fs -rmr ${WORK_DIR}");
+  }
+
+  private static void rmr(String path) {
+    sh.exec("hadoop fs -test -e $path");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rmr -skipTrash $path");
+      assertEquals("Deletion of $path from HDFS failed", 0, sh.getRet());
+    }
+  }
+
+  @Test
+  public void factorizeMovieLensRatings() {
+    // convert ratings
+    sh.exec("cat ${TEMP_DIR}/movielens/ml-1m/ratings.dat |sed -e s/::/,/g| cut -d, -f1,2,3 > ${TEMP_DIR}/movielens/ratings.csv");
+    assertEquals("Unexpected error from converting ratings", 0, sh.getRet());
+    // put ratings in hdfs
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}/movielens",
+            "hadoop fs -put ${TEMP_DIR}/movielens/ratings.csv ${WORK_DIR}/movielens/ratings.csv");
+    assertEquals("Unable to put movielens/ratings.csv in hdfs", 0, sh.getRet());
+
+    //create a 90% percent training set and a 10% probe set
+    sh.exec("mahout splitDataset --input ${WORK_DIR}/movielens/ratings.csv --output ${WORK_DIR}/dataset " +
+            "--trainingPercentage 0.9 --probePercentage 0.1 --tempDir ${WORK_DIR}/dataset/tmp");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+
+    //run distributed ALS-WR to factorize the rating matrix based on the training set
+    sh.exec("mahout parallelALS --input ${WORK_DIR}/dataset/trainingSet/ --output ${WORK_DIR}/als/out " +
+            "--tempDir ${WORK_DIR}/als/tmp --numFeatures 20 --numIterations 10 --lambda 0.065");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+
+    //compute predictions against the probe set, measure the error
+    sh.exec("mahout evaluateFactorizationParallel --output ${WORK_DIR}/als/rmse --pairs ${WORK_DIR}/dataset/probeSet/ " +
+            "--userFeatures ${WORK_DIR}/als/out/U/ --itemFeatures ${WORK_DIR}/als/out/M/");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+
+    // check that error has been calculated
+    sh.exec("hadoop fs -test -e ${WORK_DIR}/als/rmse/rmse.txt");
+    assertEquals("${WORK_DIR}/als/rmse/rmse.txt does not exist", 0, sh.getRet());
+    // print the error
+    sh.exec("hadoop fs -cat ${WORK_DIR}/als/rmse/rmse.txt");
+    assertEquals("Unexpected error from running hadoop", 0, sh.getRet());
+  }
+
+  // it's too much of a pain to use junit parameterized tests, so do it
+  // the simple way
+  private void _clusterSyntheticControlData(String algorithm) {
+    rmr("testdata");
+    sh.exec("hadoop fs -mkdir testdata",
+            "hadoop fs -put ${download_dir}/synthetic_control.data testdata");
+    assertEquals("Unable to put data in hdfs", 0, sh.getRet());
+    sh.exec("mahout org.apache.mahout.clustering.syntheticcontrol.${algorithm}.Job");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+  }
+
+  @Test
+  public void clusterControlDataWithCanopy() {
+    _clusterSyntheticControlData("canopy");
+  }
+
+  @Test
+  public void clusterControlDataWithKMeans() {
+    _clusterSyntheticControlData("kmeans");
+  }
+
+  @Test
+  public void clusterControlDataWithFuzzyKMeans() {
+    _clusterSyntheticControlData("fuzzykmeans");
+  }
+
+  @Test
+  public void clusterControlDataWithDirichlet() {
+    _clusterSyntheticControlData("dirichlet");
+  }
+
+  @Test
+  public void clusterControlDataWithMeanShift() {
+    _clusterSyntheticControlData("meanshift");
+  }
+
+  @Test
+  public void testReutersLDA() {
+    // where does lda.algorithm come in?
+    sh.exec("mahout org.apache.lucene.benchmark.utils.ExtractReuters ${TEMP_DIR}/reuters-sgm ${TEMP_DIR}/reuters-out");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    sh.exec("MAHOUT_LOCAL=true mahout seqdirectory -i ${TEMP_DIR}/reuters-out -o ${TEMP_DIR}/reuters-out-seqdir -c UTF-8 -chunk 5");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    /*
+    // reuters-out-seqdir exists on a local disk at this point,
+    // copy it to hdfs
+    rmr("${WORK_DIR}/reuters-out-seqdir");
+    sh.exec("hadoop fs -put ${TEMP_DIR}/reuters-out-seqdir ${WORK_DIR}/reuters-out-seqdir");
+    assertEquals("Unable to put reuters-out-seqdir in hdfs", 0, sh.getRet());
+    */
+    sh.exec("""mahout seq2sparse \
+    -i ${WORK_DIR}/reuters-out-seqdir/ \
+    -o ${WORK_DIR}/reuters-out-seqdir-sparse-lda \
+    -wt tf -seq -nr 3 \
+  && \
+  mahout lda \
+    -i ${WORK_DIR}/reuters-out-seqdir-sparse-lda/tf-vectors \
+    -o ${WORK_DIR}/reuters-lda -k 20 -v 50000 -ow -x 20 \
+  && \
+  mahout ldatopics \
+    -i ${WORK_DIR}/reuters-lda/state-20 \
+    -d ${WORK_DIR}/reuters-out-seqdir-sparse-lda/dictionary.file-0 \
+    -dt sequencefile""");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+  }
+
+  @Test
+  public void testBayesNewsgroupClassifier() {
+    sh.exec("""mahout org.apache.mahout.classifier.bayes.PrepareTwentyNewsgroups \
+  -p ${TEMP_DIR}/20news-bydate/20news-bydate-train \
+  -o ${TEMP_DIR}/20news-bydate/bayes-train-input \
+  -a org.apache.mahout.vectorizer.DefaultAnalyzer \
+  -c UTF-8""");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    sh.exec("""mahout org.apache.mahout.classifier.bayes.PrepareTwentyNewsgroups \
+  -p ${TEMP_DIR}/20news-bydate/20news-bydate-test \
+  -o ${TEMP_DIR}/20news-bydate/bayes-test-input \
+  -a org.apache.mahout.vectorizer.DefaultAnalyzer \
+  -c UTF-8""");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+
+    // put bayes-train-input and bayes-test-input in hdfs
+    sh.exec("hadoop fs -put ${TEMP_DIR}/20news-bydate/bayes-train-input ${WORK_DIR}/20news-bydate/bayes-train-input");
+    assertEquals("Unable to put bayes-train-input in hdfs", 0, sh.getRet());
+    sh.exec("hadoop fs -put ${TEMP_DIR}/20news-bydate/bayes-test-input ${WORK_DIR}/20news-bydate/bayes-test-input");
+    assertEquals("Unable to put bayes-test-input in hdfs", 0, sh.getRet());
+
+    sh.exec("""mahout trainclassifier \
+  -i ${WORK_DIR}/20news-bydate/bayes-train-input \
+  -o ${WORK_DIR}/20news-bydate/bayes-model \
+  -type bayes \
+  -ng 1 \
+  -source hdfs""");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    sh.exec("""mahout testclassifier \
+  -m ${WORK_DIR}/20news-bydate/bayes-model \
+  -d ${WORK_DIR}/20news-bydate/bayes-test-input \
+  -type bayes \
+  -ng 1 \
+  -source hdfs \
+  -method mapreduce""");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+
+  }
+
+}

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/pom.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/pom.xml?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/pom.xml (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/pom.xml Mon Nov 21 21:31:06 2011
@@ -21,13 +21,13 @@
   <parent>
     <groupId>org.apache.bigtop.itest</groupId>
     <artifactId>bigtop-smokes</artifactId>
-    <version>0.2.0-incubating-SNAPSHOT</version>
+    <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
   <groupId>org.apache.bigtop.itest</groupId>
   <artifactId>oozie-smoke</artifactId>
-  <version>0.2.0-incubating-SNAPSHOT</version>
+  <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
   <name>ooziesmoke</name>
 
   <dependencies>

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy Mon Nov 21 21:31:06 2011
@@ -45,9 +45,9 @@ class TestOozieSmoke {
     namenode = System.getProperty("org.apache.bigtop.itest.namenode", "${conf.get('fs.default.name')}");
 
     oozie_tar_home = System.getProperty("org.apache.bigtop.itest.oozie_tar_home",
-                                        (new File("/usr/share/doc/oozie/")).exists() ?
-                                           "/usr/share/doc/oozie/" :
-                                           "/usr/share/doc/packages/oozie/");
+                                        (new File("/usr/share/doc/packages/oozie/")).exists() ?
+                                           "/usr/share/doc/packages/oozie/" :
+                                           "/usr/share/doc/oozie*/");
 
     sh.exec("mkdir /tmp/${tmp_dir}",
             "cd /tmp/${tmp_dir}",

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/pom.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/pom.xml?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/pom.xml (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/pom.xml Mon Nov 21 21:31:06 2011
@@ -20,14 +20,14 @@
   <parent>
     <groupId>org.apache.bigtop.itest</groupId>
     <artifactId>bigtop-smokes</artifactId>
-    <version>0.2.0-incubating-SNAPSHOT</version>
+    <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.bigtop.itest</groupId>
   <artifactId>package-smoke</artifactId>
-  <version>0.2.0-incubating-SNAPSHOT</version>
+  <version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
   <name>packagesmoke</name> 
 
   <dependencies>
@@ -49,11 +49,33 @@
       <artifactId>ant-junit</artifactId>
       <version>1.8.2</version>
     </dependency>
-
-    <dependency>
-      <groupId>org.apache.bigtop.itest</groupId>
-      <artifactId>hadoop-smoke</artifactId>
-      <version>0.2.0-incubating-SNAPSHOT</version>
-    </dependency>
   </dependencies>
+
+  <build>
+    <plugins>
+        <plugin>
+          <groupId>org.codehaus.groovy.maven</groupId>
+          <artifactId>gmaven-plugin</artifactId>
+          <version>1.0</version>
+          <executions>
+            <execution>
+              <id>check-manifest</id>
+              <phase>process-test-classes</phase>
+              <goals>
+                <goal>execute</goal>
+              </goals>
+              <configuration>
+                <source><![CDATA[
+                   ["yum", "zypper", "apt", "urpmi"].each { 
+                     def a = new XmlSlurper().parseText(new File("${project.build.directory}/classes/package_data_${it}.xml").text);
+                   }
+                   println "Manifest XML validated";
+                  ]]>
+                </source>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+     </plugins>
+  </build>
 </project>

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHServices.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHServices.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHServices.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHServices.groovy Mon Nov 21 21:31:06 2011
@@ -21,47 +21,65 @@ package org.apache.bigtop.itest.packages
 import org.apache.bigtop.itest.pmanager.PackageManager
 
 class CDHServices {
+  static final List serviceDaemonUserNames = [ "flume", "hbase" , "hdfs" , "hue" , "mapred" , "oozie" , "sqoop" ,
+                                               "zookeeper", "hadoop"];
+
   static final Map components = [
-                     HDFS           : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode",
-                                                     "hadoop-0.20-secondarynamenode" ],
+                     HDFS           : [ services : [ "hadoop-namenode", "hadoop-datanode",
+                                                     "hadoop-secondarynamenode" ],
                                         verifier : new StateVerifierHDFS(),
+                                        killIDs  : [ "hdfs" ],
                                       ],
-                     mapreduce      : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode",
-                                                     "hadoop-0.20-jobtracker", "hadoop-0.20-tasktracker" ],
+                     mapreduce      : [ services : [ "hadoop-namenode", "hadoop-datanode",
+                                                     "hadoop-jobtracker", "hadoop-tasktracker" ],
+                                        killIDs  : [ "hdfs", "mapred" ],
                                         verifier : new StateVerifierMapreduce(),
                                       ],
-                     HBase          : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode",
+                     hive           : [ services : [ "hadoop-namenode", "hadoop-datanode",
+                                                     "hadoop-jobtracker", "hadoop-tasktracker" ],
+                                        killIDs  : [ "hdfs", "mapred" ],
+                                        verifier : new StateVerifierHive(),
+                                      ],
+                     HBase          : [ services : [ "hadoop-namenode", "hadoop-datanode",
                                                      "hadoop-hbase-master" ],
+                                        killIDs  : [ "hdfs", "hbase" ],
                                         verifier : new StateVerifierHBase(),
                                       ],
-                     zookeeper      : [ services : [ "${PackageManager.getPackageManager().type == 'apt' ? 'hadoop-zookeeper-server' : 'hadoop-zookeeper'}" ],
+                     zookeeper      : [ services : [ "hadoop-zookeeper" ],
                                         verifier : new StateVerifierZookeeper(),
+                                        killIDs  : [ "zookeeper" ],
                                       ],
-                     oozie          : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode", "hadoop-0.20-jobtracker", "hadoop-0.20-tasktracker",
+                     oozie          : [ services : [ "hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
                                                      "oozie" ],
+                                        killIDs  : [ "hdfs", "mapred", "oozie" ],
                                         verifier : new StateVerifierOozie(),
                                       ],
-                     flume          : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode",
+                     flume          : [ services : [ "hadoop-namenode", "hadoop-datanode",
                                                      "flume-master", "flume-node" ],
+                                        killIDs  : [ "hdfs", "flume" ],
                                         verifier : new StateVerifierFlume(),
                                       ],
-                     sqoop          : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode",
+                     sqoop          : [ services : [ "hadoop-namenode", "hadoop-datanode",
                                                      "sqoop-metastore" ],
+                                        killIDs  : [ "hdfs", "sqoop" ],
                                         verifier : new StateVerifierSqoop(),
                                       ],
-                     hue            : [ services : [ "hadoop-0.20-namenode", "hadoop-0.20-datanode", "hadoop-0.20-jobtracker", "hadoop-0.20-tasktracker",
+                     hue            : [ services : [ "hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
                                                      "hue" ],
+                                        killIDs  : [ "hdfs", "mapred", "hue" ],
                                         verifier : new StateVerifierHue(),
                                       ],
                    ];
 
   static final Map<String, List<String>> release2services = [
-                     "2"            : [ "HDFS", "mapreduce" ],
-                     "3b2"          : [ "HDFS", "mapreduce", "HBase", "zookeeper", "oozie", "flume",          "hue" ],
-                     "3b3"          : [ "HDFS", "mapreduce", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3b4"          : [ "HDFS", "mapreduce", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3u0"          : [ "HDFS", "mapreduce", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3"            : [ "HDFS", "mapreduce", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
+                     "bigtop"       : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume" ],
+                     "2"            : [ "HDFS", "mapreduce", "hive" ],
+                     "3b2"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume",          "hue" ],
+                     "3b3"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
+                     "3b4"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
+                     "3u0"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
+                     "3u1"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
+                     "3"            : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume" ],
                    ];
 
   public static Map getServices(String release) {
@@ -69,8 +87,10 @@ class CDHServices {
     List<String> services;
 
     if ((release =~ /\.\./).find()) {
-      services = release2services[release.replaceAll(/^.*\.\./, "")].clone();
-      release2services[release.replaceAll(/\.\..*$/, "")].each {
+      String release_from = release.replaceAll(/\.\..*$/, "");
+      release = release.replaceAll(/^.*\.\./, "");
+      services = release2services[release].clone();
+      release2services[release_from].each {
         services.remove(it);
       }
     } else {
@@ -78,7 +98,16 @@ class CDHServices {
     }
 
     services.each {
-        res[it] = components[it];
+        // zookeeper is a very messy case of naming :-(
+        if (it == "zookeeper" &&
+            (PackageManager.getPackageManager().type == 'apt' ||
+             release == "3" || release == "3u1" || release == "bigtop")) {
+          res[it] = [ services : [ "hadoop-zookeeper-server" ],
+                      verifier : new StateVerifierZookeeper(),
+                    ];
+        } else {
+          res[it] = components[it];
+        }
     }
     return res;
   }

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy Mon Nov 21 21:31:06 2011
@@ -58,13 +58,16 @@ class DeployCDH {
     Map<String, LinkedHashMap<String, Collection>> distPackages = [
       "2" : [ "apt" : cdh2 + aptPkg,
               "yum" : cdh2 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
+               "zypper" : [],
               // "cloudera-desktop", "cloudera-desktop-plugins",
             ],
     "3b2" : [ "apt" : cdh3b2 + aptPkg,
               "yum" : cdh3b2 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
+              "zypper" : [],
             ],
     "3b3" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
               "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
+              "zypper" : [],
             ],
     "3b4" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
               "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc",  "hadoop-hive-webinterface" ],
@@ -78,6 +81,10 @@ class DeployCDH {
               "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc" ],
               "zypper" : cdh3b3 + zypperPkg,
             ],
+    "3u1" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
+              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc" ],
+              "zypper" : cdh3b3 + zypperPkg,
+            ],
     ];
 
   @Rule
@@ -94,10 +101,22 @@ class DeployCDH {
     checkThat("failed to add repository for pre-upgrade CDH deployment",
               oldRepo.getPm().refresh(), equalTo(0));
 
+    // Let's try to remove existing packages -- just in case
+    List stalePkgs = [];
+    distPackages.each { key, value -> stalePkgs.addAll(value[pm.type]); }
+    (stalePkgs as Set).each {
+      PackageInstance pkg = PackageInstance.getPackageInstance(pm, it);
+      pkg.remove();
+    }
+
     packages.each {
       PackageInstance pkg = PackageInstance.getPackageInstance(pm, it);
-      checkThat("failed to install required package ${pkg.getName()}",
-                pkg.install(), equalTo(0));
+      // Some packages get installed as a requirement of others that we install.
+      // We don't want to install them for a second time.
+      if (!pkg.isInstalled()) {
+        checkThat("failed to install required package ${pkg.getName()}",
+                  pkg.install(), equalTo(0));
+      }
       pkg.getServices().each { it.value.stop(); }
     }
 

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy Mon Nov 21 21:31:06 2011
@@ -30,6 +30,7 @@ import org.hamcrest.Matcher
 
 import static org.apache.bigtop.itest.packagesmoke.PackageTestMatchers.MapHasKeys.hasSameKeys
 import org.apache.bigtop.itest.shell.Shell
+import groovy.xml.MarkupBuilder
 
 class PackageTestCommon {
   static public PackageManager pm;
@@ -48,7 +49,7 @@ class PackageTestCommon {
   }
 
   String formatDescription(String description, String summary) {
-    return ((summary ?: "") + ' ' + description).replaceAll(/\s+/,' ').trim();
+    return ((summary ?: "") + ' ' + description).replaceAll(/\s+/,' ').replaceAll(/\s\.\s/,' ').replaceAll(/\s\.$/,' ').trim();
   }
 
   private void checkMetadata(PackageInstance pkg, Map expected_metadata) {
@@ -96,7 +97,13 @@ class PackageTestCommon {
   }
 
   public void checkPulledDeps(Map expected_deps) {
-    Map pkgDeps = pkg.getDeps();
+    Map pkgDeps = [:];
+
+    pkg.getDeps().each { k, v ->
+      if (!(k =~ /\.so\.[0-9]/).find()) {
+        pkgDeps[k] = v;
+      }
+    }
 
     checkThat("a set of dependencies of package $name is different from what was expected",
               pkgDeps, hasSameKeys(expected_deps));
@@ -321,12 +328,12 @@ class PackageTestCommon {
     if (pm.type == "apt" && doc != null) {
       file.putAll(doc);
     } else {
-      checkThat("list of documentation files of pacakge $name is different from what was expected",
+      checkThat("list of documentation files of package $name is different from what was expected",
                 docs, hasSameKeys(doc));
     }
-    checkThat("list of config files of pacakge $name is different from what was expected",
+    checkThat("list of config files of package $name is different from what was expected",
               configs, hasSameKeys(config));
-    checkThat("list of regular files of pacakge $name is different from what was expected",
+    checkThat("list of regular files of package $name is different from what was expected",
               files, hasSameKeys(file));
 
     // TODO: we should probably iterate over a different set of files to include loose files as well
@@ -367,14 +374,23 @@ class PackageTestCommon {
                 problemFiles, equalTo([]));
 
     // a bit of debug output
-    pkg.getFiles().each {
-      Map meta = fileMeta[it] ?: [:];
-      String target = meta.target ? " target=\"${meta.target}\"" : "";
-      String tip = configs[it] ? "config" : (docs[it] ? "doc " : "file");
-      println "\n::: ${name}    <${tip} name=\"${it}\" owners=\"${meta.owners}\" perm=\"${meta.perm}\"" +
-                                  " user=\"${meta.user}\" group=\"${meta.group}\"${target}/>";
+    def newManifest = new MarkupBuilder(new FileWriter("${pkg.name}_manifest.xml"));
+
+    newManifest.content() {
+      fileMeta = getLsMetadata(pkg.getFiles());
+
+      pkg.getFiles().each {
+        Map meta = fileMeta[it] ?: [:];
+        String node = configs[it] ? "config" : (docs[it] ? "doc " : "file");
+        int owners = meta.owners ?: -1;
+
+        if (meta.target) {
+          "$node"(name : it, owners : owners, perm : meta.perm, user : meta.user, group : meta.group, target : meta.target);
+        } else {
+          "$node"(name : it, owners : owners, perm : meta.perm, user : meta.user, group : meta.group);
+        }
+      }
     }
-    // println "";
   }
 
   public void checkComplimentary32bitInstall() {

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy Mon Nov 21 21:31:06 2011
@@ -50,7 +50,7 @@ class PackageTestRepoMgr {
 
   public parseRepoSpec(String prefix) {
     cdhRepoHost = System.getProperty("${prefix}.host", "nightly.cloudera.com");
-    cdhRepoVersion = System.getProperty("${prefix}.version", "3");
+    cdhRepoVersion = System.getProperty("${prefix}.version", "bigtop");
 
     Map cdhKeys  = [ yum    : "http://${cdhRepoHost}/redhat/cdh/RPM-GPG-KEY-cloudera",
                      zypper : null,
@@ -69,6 +69,7 @@ class PackageTestRepoMgr {
 
   public boolean addRepo() {
     repoName = "cloudera-cdh${cdhRepoVersion}";
+    pm.cleanup();
     try {
       String repoText = cdhRepoFileURL.toURL().text;
       if (pm.addBinRepo(repoName, repoText)) {
@@ -80,10 +81,11 @@ class PackageTestRepoMgr {
         return false;
       }
     }
+    pm.refresh();
     return true;
   }
 
   public boolean removeRepo() {
     return (pm.removeBinRepo(repoName) == 0);
   }
-}
\ No newline at end of file
+}

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy Mon Nov 21 21:31:06 2011
@@ -28,6 +28,7 @@ class StateVerifierFlume extends StateVe
 
   public void createState() {
     String node;
+    sleep(120001);
     shFlume.exec("connect localhost",
                  "getnodestatus",
                  "quit\n");
@@ -35,14 +36,15 @@ class StateVerifierFlume extends StateVe
                                      .replaceAll(/^.*Master knows about [0-9]* nodes /,'')
                                      .trim();
     shFlume.exec("connect localhost",
-                 "exec config $node 'text(\"/etc/group\")' 'dfs(\"hdfs://localhost/flume.test\")'",
+                 "exec config $node 'text(\"/etc/group\")' 'collectorSink(\"hdfs://localhost/flume\",\"test\")'",
                  "quit\n");
     sleep(5001);
-    (new Shell()).exec("hadoop fs -rm /flume.test");
   }
 
   public boolean verifyState() {
     sleep(5001);
-    return ((new Shell()).exec("hadoop fs -ls /flume.test >/dev/null 2>&1").getRet() == 0);
+    boolean ret = ((new Shell()).exec("hadoop fs -ls /flume >/dev/null 2>&1").getRet() == 0);
+    (new Shell()).exec("hadoop fs -rmr /flume");
+    return ret;
   }
 }
\ No newline at end of file

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy Mon Nov 21 21:31:06 2011
@@ -36,6 +36,7 @@ class StateVerifierHBase extends StateVe
   public static void createStaticState() {
     shHBase.exec("create 't1', 'f1'",
                  "put 't1', 'r1', 'f1:q', 'val'",
+                 "flush 't1'",
                  "quit\n");
   }
 

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHDFS.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHDFS.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHDFS.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHDFS.groovy Mon Nov 21 21:31:06 2011
@@ -55,6 +55,11 @@ class StateVerifierHDFS extends StateVer
     return (Arrays.equals(digest, MD5.digest()));
   } */
 
+  public boolean config() {
+    Shell shRoot = new Shell("/bin/bash", "root");
+    return 0 == shRoot.exec("sed -i -e 's#<configuration>\$#<configuration><property><name>dfs.safemode.min.datanodes</name><value>1</value></property><property><name>dfs.safemode.extension</name><value>0</value></property>#' /etc/hadoop/conf/hdfs-site.xml").getRet();
+  }
+
   public static void createStaticState() {
     sh.exec("hadoop fs -put <(echo StateHDFSVErifier) /StateHDFSVErifier");
   }

Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHive.groovy (from r1204645, incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHive.groovy?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHive.groovy&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy&r1=1204645&r2=1204715&rev=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHive.groovy Mon Nov 21 21:31:06 2011
@@ -15,34 +15,29 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.bigtop.itest.packagesmoke
 
 import org.apache.bigtop.itest.shell.Shell
-import java.security.MessageDigest
-import org.junit.Before
-import org.junit.After
-import org.junit.Test
 
-class StateVerifierFlume extends StateVerifier {
-  Shell shFlume = new Shell("flume shell");
+class StateVerifierHive extends StateVerifier {
+  final static String schema = "CREATE TABLE state_table(A INT) PARTITIONED BY (dt STRING) row format delimited fields terminated by ','  escaped by '\\\\\\\\' stored as textfile;";
+
+  Shell sh = new Shell();
 
   public void createState() {
-    String node;
-    shFlume.exec("connect localhost",
-                 "getnodestatus",
-                 "quit\n");
-    node = shFlume.getOut().join(' ').replaceAll(/ --> IDLE.*$/,'')
-                                     .replaceAll(/^.*Master knows about [0-9]* nodes /,'')
-                                     .trim();
-    shFlume.exec("connect localhost",
-                 "exec config $node 'text(\"/etc/group\")' 'dfs(\"hdfs://localhost/flume.test\")'",
-                 "quit\n");
-    sleep(5001);
-    (new Shell()).exec("hadoop fs -rm /flume.test");
+    File tmpFile = File.createTempFile("StateVerifierHiveData", ".txt");
+    tmpFile.withWriter { (1..15).each { num -> it.write("$num\n"); } };
+
+    sh.exec("hive -e \"${schema}\"");
+    ["2008-08-08", "2008-08-09"].each {
+      sh.exec("hive -e \"LOAD DATA LOCAL INPATH '${tmpFile.getCanonicalPath()}' OVERWRITE INTO TABLE state_table partition (dt='$it');\"");
+    }
+    tmpFile.delete();
   }
 
   public boolean verifyState() {
-    sleep(5001);
-    return ((new Shell()).exec("hadoop fs -ls /flume.test >/dev/null 2>&1").getRet() == 0);
+    sh.exec("hive -e 'SELECT COUNT(*) from state_table;'")
+    return (sh.getOut() =~ /30/).find();
   }
-}
\ No newline at end of file
+}

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy Mon Nov 21 21:31:06 2011
@@ -36,20 +36,22 @@ class StateVerifierHue extends StateVeri
 
   public void createState() {
     // first call creates admin/admin username/keypair
-    sh.exec("curl --data '${creds}' ${loginURL}");
+    sh.exec("curl -m 60 --data '${creds}' ${loginURL}");
   }
 
   public boolean verifyState() {
     String sessionId;
     boolean res;
 
-    sh.exec("curl -i --data '${creds}' ${loginURL} | sed -e 's#Set-Cookie: *##' -e 's#;.*\$##' | grep '^sessionid'");
+    sh.exec("curl -m 60 -i --data '${creds}' ${loginURL} | sed -e 's#Set-Cookie: *##' -e 's#;.*\$##' | grep '^sessionid'");
     sessionId = sh.getOut().join('');
 
-    res = (sh.exec("curl -b '${sessionId}' ${checkURL} | grep -q 'All ok. Configuration check passed'").getRet() == 0);
+    sh.exec("curl -m 60 -b '${sessionId}' ${checkURL}");
+    res = (sh.getOut().grep( ~/.*All ok. Configuration check passed.*/ ).size() != 0)
     checkApps.each {
-      res = res && (sh.exec("curl -b '${sessionId}' ${hueServer}/${it}/ | grep -q 'Page Not Found'").getRet() != 0);
+      sh.exec("curl -m 60 -b '${sessionId}' ${hueServer}/${it}/");
+      res = res && (sh.getOut().grep( ~/.*Page Not Found.*/ ).size() == 0);
     }
     return res;
   }
-}
\ No newline at end of file
+}

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierMapreduce.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierMapreduce.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierMapreduce.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierMapreduce.groovy Mon Nov 21 21:31:06 2011
@@ -28,7 +28,7 @@ class StateVerifierMapreduce extends Sta
   }
 
   public static boolean verifyStaticState() {
-    return sh.exec("hadoop jar `ls /usr/lib/hadoop-0.20/hadoop*examples*jar | head -1` pi 10 100").getRet() == 0;
+    return sh.exec("hadoop jar `ls /usr/lib/hadoop*/hadoop*examples*jar | head -1` pi 10 100").getRet() == 0;
   }
 
   void createState() {

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy Mon Nov 21 21:31:06 2011
@@ -125,10 +125,9 @@ class TestPackagesBasics extends Package
   @Test
   synchronized void testPackageInstall() {
     // WARNING: sometimes packages do not install because the server is busy
-    int i;
-    for (i=3; pkg.install() && i>0; i--) {};
-    checkThat("could only install package $name on the ${3-i} try",
-              i, equalTo(3));
+    for (int i=3; pkg.install() && i>0; i--) {
+      recordFailure("can not install package $name will retry $i times");
+    }
 
     // TODO: we need to come up with a way to abort any further execution to avoid spurious failures
 
@@ -163,12 +162,6 @@ class TestPackagesBasics extends Package
     checkAlternatives(getMap(golden.alternatives));
   }
 
-  @RunStage(level=1)
-  @Test
-  void testPackageRemove() {
-    checkRemoval();
-  }
-
   static void tryOrFail(Closure cl, int retries, String fail) {
     while (!cl.call()) {
       retries--;

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy Mon Nov 21 21:31:06 2011
@@ -23,7 +23,7 @@ import org.apache.bigtop.itest.junit.Ord
 import org.junit.Test
 
 @RunWith(OrderedParameterized.class)
-class TestPackagesPseudoDistributed extends TestPackagesSingleNode {
+class TestPackagesPseudoDistributed extends TestPackagesBasics {
   public TestPackagesPseudoDistributed(String pkgName, Node pkgGolden) {
     super(pkgName, pkgGolden);
   }
@@ -33,8 +33,28 @@ class TestPackagesPseudoDistributed exte
   synchronized void testRemoteMetadata() {
   }
 
-  @RunStage(level=1)
+  @RunStage(level=-1)
   @Test
-  void testPackageRemove() {
+  void testPackageUpgrade() {
+    if (isUpgrade()) {
+      checkThat("upgrade sequence on a package $name failed to be executed",
+                CDHUpgradeSequence.execute(name, System.getProperty("cdh.prev.repo.version"), "3"), equalTo(0));
+    }
+  }
+
+  @Test
+  void testPulledDeps() {
+    checkPulledDeps(getMap(golden.deps));
+  }
+
+  @Test
+  void testPackageContent() {
+    Map files = getMap(golden.content);
+    checkFiles(files.config, files.doc, files.file);
+  }
+
+  @Test
+  void testPackageServices() {
+    checkServices(getMap(golden.services));
   }
 }

Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy (from r1204645, incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesReadiness.groovy)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesReadiness.groovy&r1=1204645&r2=1204715&rev=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesReadiness.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy Mon Nov 21 21:31:06 2011
@@ -15,58 +15,30 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.bigtop.itest.packagesmoke
 
-import org.apache.bigtop.itest.junit.OrderedParameterized
-import org.junit.runner.RunWith
-import org.apache.bigtop.itest.junit.OrderedParameterized.RunStage
 import org.junit.Test
 
-@RunWith(OrderedParameterized.class)
-class TestPackagesReadiness extends TestPackagesSingleNode {
-  public TestPackagesReadiness(String pkgName, Node pkgGolden) {
-    super(pkgName, pkgGolden);
-  }
-  
-  @RunStage(level=-1)
-  @Test
-  void testPackageUpgrade() {
-  }
-
-  @Test
-  void testRepoFile() {
-  }
-
-  @Test
-  void testPulledDeps() {
-  }
-
-  @Test
-  void testPackageMetadata() {
-  }
-
-  @Test
-  void testPackageContent() {
-  }
-
-  @Test
-  void testPackageServices() {
-  }
+import static org.hamcrest.CoreMatchers.equalTo
+import org.junit.runner.RunWith
 
-  @Test
-  void testUsers() {
-  }
+import org.apache.bigtop.itest.junit.OrderedParameterized
+import org.apache.bigtop.itest.junit.OrderedParameterized.RunStage
 
-  @Test
-  void testGroups() {
-  }
+@RunWith(OrderedParameterized.class)
+class TestPackagesPseudoDistributedWithRM extends TestPackagesPseudoDistributed {
 
-  @Test
-  void testAlternatives() {
+  public TestPackagesPseudoDistributedWithRM(String pkgName, Node pkgGolden) {
+    super(pkgName, pkgGolden);
   }
 
   @RunStage(level=1)
   @Test
   void testPackageRemove() {
+    checkComplimentary32bitInstall();
+    checkDaemonStart();
+    sleep(3001); // TODO FIXME: CDH-2816 should address the timing of daemon startup.
+    checkRemoval();
   }
 }

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy?rev=1204715&r1=1204714&r2=1204715&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy Mon Nov 21 21:31:06 2011
@@ -28,12 +28,16 @@ import org.apache.bigtop.itest.posix.Ser
 import org.apache.bigtop.itest.junit.OrderedParameterized.RunStage
 import org.hamcrest.Matcher
 import static org.hamcrest.core.IsEqual.equalTo
+import org.apache.bigtop.itest.shell.Shell
 
 @RunWith(OrderedParameterized.class)
 class TestServices {
   Map.Entry svcDescr;
   List<Service> svcs;
   StateVerifier verifier;
+  List<String> killIDs;
+
+  static Shell shRoot = new Shell("/bin/bash", "root");
 
   @Rule
   public ErrorCollector errors = new ErrorCollector();
@@ -49,6 +53,7 @@ class TestServices {
     svcDescr = svc;
     svcs = svcDescr.value.services.collect { new Service(it); };
     verifier = svcDescr.value.verifier;
+    killIDs = svcDescr.value.killIDs;
   }
 
   static Map<String, Object[]> selectServices(String CDHrelease) {
@@ -60,6 +65,14 @@ class TestServices {
     return res;
   }
 
+  @AfterClass
+  static void tearDown() {
+    // TODO: this is pretty silly, but it'll do for now
+    CDHServices.serviceDaemonUserNames.each {
+      shRoot.exec("kill -9 `ps -U${it} -opid=`");
+    }
+  }
+
   @RunStage(level=-1)
   @Test
   void createState() {
@@ -79,16 +92,28 @@ class TestServices {
               verifier.verifyState(), equalTo(true));
 
     svcs.reverseEach {
-      checkThat("service ${it.getName()} failed to stop",
-                it.stop(), equalTo(0));
+      // TODO: we're only doing the best we can here;
+      // a total eradication of services happens at @BeforeClass
+      it.stop();
+      sleep(5001);
     }
     sleep(5001);
+
+    // TODO: this is pretty silly, but it'll do for now
+    killIDs.each {
+      shRoot.exec("kill -9 `ps -U${it} -opid=`");
+    }
   }
 
   @RunStage(level=1)
   @Test
   void verifyState() {
     svcs.each {
+      checkThat("failed to configure service ${it.getName()}",
+                verifier.config(), equalTo(true));
+    }
+
+    svcs.each {
       checkThat("service ${it.getName()} failed to start",
                 it.start(), equalTo(0));
     }
@@ -97,6 +122,16 @@ class TestServices {
               verifier.verifyState(), equalTo(true));
 
     svcs.reverseEach { it.stop(); }
+    sleep(5001);
+    // let's check if they are really stopped (if not -- we'll complain and kill them)
+    killIDs.each {
+      shRoot.exec("kill -0 `ps -U${it} -opid=`");
+      if (!shRoot.getRet()) {
+        shRoot.exec("kill -9 `ps -U${it} -opid=`");
+        checkThat("service running under the name of $it is supposed to be stopped, but it is not",
+                  true, equalTo(false));
+      }
+    }
   }
 
   public void checkThat(String msg, Object value, Matcher<Object> matcher) {



Mime
View raw message