From: rvs@apache.org
To: bigtop-commits@incubator.apache.org
Reply-To: bigtop-dev@incubator.apache.org
Subject: svn commit: r1353343 - /incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy
Date: Sun, 24 Jun 2012 21:49:12 -0000
Message-Id: <20120624214912.5A4CF23889E0@eris.apache.org>

Author: rvs
Date: Sun Jun 24 21:49:11 2012
New Revision: 1353343

URL: http://svn.apache.org/viewvc?rev=1353343&view=rev
Log:
BIGTOP-620. Add test for dfsadmin commands (Sujay Rau via rvs)

Added:
    incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy

Added: incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy?rev=1353343&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy (added)
+++ incubator/bigtop/trunk/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hdfstests/TestDFSAdmin.groovy Sun Jun 24 21:49:11 2012
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hdfstests;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.bigtop.itest.JarContent;
+import org.apache.bigtop.itest.shell.Shell;
+
+public class TestDFSAdmin {
+
+  // shell used to run every dfsadmin command as the hdfs superuser
+  private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestDFSAdmin.class, "." , null);
+    System.out.println("Running DFSAdmin commands:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+  }
+
+  @Test
+  public void testDFSbasic() {
+    // report
+    System.out.println("-report");
+    shHDFS.exec("hdfs dfsadmin -report");
+    assertTrue("-report failed", shHDFS.getRet() == 0);
+
+    // help
+    System.out.println("-help");
+    shHDFS.exec("hdfs dfsadmin -help");
+    assertTrue("-help failed", shHDFS.getRet() == 0);
+
+    // printTopology
+    System.out.println("-printTopology");
+    shHDFS.exec("hdfs dfsadmin -printTopology");
+    assertTrue("-printTopology failed", shHDFS.getRet() == 0);
+
+    // metasave
+    System.out.println("-metasave");
+    shHDFS.exec("hdfs dfsadmin -metasave metasave_test");
+    assertTrue("-metasave failed", shHDFS.getRet() == 0);
+  }
+
+  @Test
+  public void testDFSsafemode() {
+    // safemode
+    System.out.println("-safemode");
+    shHDFS.exec("hdfs dfsadmin -safemode leave");
+    assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -safemode get");
+    assertTrue("-safemode get failed", shHDFS.getOut().get(0) == "Safe mode is OFF");
+    assertTrue("-safemode get failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -safemode enter");
+    assertTrue("-safemode enter failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -safemode get");
+    assertTrue("-safemode get failed", shHDFS.getOut().get(0) == "Safe mode is ON");
+    assertTrue("-safemode get failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -safemode leave");
+    assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
+  }
+
+  @Test
+  public void testDFSnamespace() {
+    // saveNamespace
+    System.out.println("-saveNamespace");
+    shHDFS.exec("hdfs dfsadmin -safemode enter");
+    shHDFS.exec("hdfs dfsadmin -saveNamespace");
+    assertTrue("-saveNamespace failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -safemode leave");
+    shHDFS.exec("hdfs dfsadmin -saveNamespace");
+    assertTrue("-saveNamespace worked in non safemode", shHDFS.getRet() != 0);
+  }
+
+  @Test
+  public void testDFSrefreshcommands() {
+    // refreshNodes
+    System.out.println("-refreshNodes");
+    shHDFS.exec("hdfs dfsadmin -refreshNodes");
+    assertTrue("-refreshNodes failed", shHDFS.getRet() == 0);
+
+    /*// refreshServiceAcl - does not work - shHDFS.getRet() = 255
+    System.out.println("-refreshServiceAcl");
+    shHDFS.exec("hdfs dfsadmin -refreshServiceAcl");
+    System.out.println(shHDFS.getRet());
+    assertTrue("-refreshServiceAcl failed", shHDFS.getRet() == 0); */
+
+    // refreshUserToGroupsMappings
+    System.out.println("-refreshUserToGroupsMappings");
+    shHDFS.exec("hdfs dfsadmin -refreshUserToGroupsMappings");
+    assertTrue("-refreshUserToGroupsMappings failed", shHDFS.getRet() == 0);
+
+    // refreshSuperUserGroupsConfiguration
+    System.out.println("-refreshSuperUserGroupsConfiguration");
+    shHDFS.exec("hdfs dfsadmin -refreshSuperUserGroupsConfiguration");
+    assertTrue("-refreshSuperUserGroupsConfiguration failed", shHDFS.getRet() == 0);
+  }
+
+  @Test
+  public void testDFSupgrades() {
+    // upgradeProgress
+    System.out.println("-upgradeProgress");
+    shHDFS.exec("hdfs dfsadmin -upgradeProgress details");
+    assertTrue("-upgradeProgress details failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -upgradeProgress status");
+    assertTrue("-upgradeProgress status failed", shHDFS.getRet() == 0);
+
+    // finalizeUpgrade
+    System.out.println("-finalizeUpgrade");
+    shHDFS.exec("hdfs dfsadmin -finalizeUpgrade");
+    assertTrue("-finalizeUpgrade failed", shHDFS.getRet() == 0);
+  }
+
+  @Test
+  public void testDFSstorage() {
+    // restoreFailedStorage
+    System.out.println("-restoreFailedStorage");
+    shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
+    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -restoreFailedStorage check");
+    assertTrue("-restoreFailedStorage check failed", shHDFS.getOut().get(0) == "restoreFailedStorage is set to false");
+    assertTrue("-restoreFailedStorage check failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -restoreFailedStorage true");
+    assertTrue("-restoreFailedStorage true failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -restoreFailedStorage check");
+    assertTrue("-restoreFailedStorage check", shHDFS.getOut().get(0) == "restoreFailedStorage is set to true");
+    assertTrue("-restoreFailedStorage check", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
+    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
+  }
+
+  @Test
+  public void testDFSquotas() {
+    // setQuota, clrQuota
+    System.out.println("-setQuota, -clrQuota");
+    shHDFS.exec("date");
+    String quota_test = "quota_test" + shHDFS.getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+    shHDFS.exec("hadoop fs -test -e $quota_test");
+    if (shHDFS.getRet() == 0) {
+      shHDFS.exec("hadoop fs -rmr -skipTrash $quota_test");
+      assertTrue("Deletion of previous quota_test directory from HDFS failed",
+          shHDFS.getRet() == 0);
+    }
+    shHDFS.exec("hadoop fs -mkdir $quota_test");
+    shHDFS.exec("hdfs dfsadmin -setQuota 1000 $quota_test");
+    assertTrue("-setQuota failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -clrQuota $quota_test");
+    assertTrue("-clrQuota failed", shHDFS.getRet() == 0);
+
+    // setSpaceQuota, clrSpaceQuota
+    System.out.println("-setSpaceQuota, -clrSpaceQuota");
+    shHDFS.exec("hdfs dfsadmin -setSpaceQuota 1000 $quota_test");
+    assertTrue("-setSpaceQuota failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hdfs dfsadmin -clrSpaceQuota $quota_test");
+    assertTrue("-clrSpaceQuota failed", shHDFS.getRet() == 0);
+    shHDFS.exec("hadoop fs -rmr $quota_test");
+  }
+
+}
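
Note (not part of the commit): Bigtop normally drives integration tests like this one through its Maven-based test-execution harness, but a JUnit 4 class such as TestDFSAdmin can also be launched directly with JUnit's console runner. The following Groovy sketch is only an illustration under that assumption; it requires the Bigtop test artifacts on the classpath and a running HDFS cluster reachable as the "hdfs" user.

    import org.junit.runner.JUnitCore
    import org.junit.runner.Result

    // Run the whole suite programmatically; every test shells out to
    // "hdfs dfsadmin" as the hdfs user, so it only passes against a live cluster.
    Result result = JUnitCore.runClasses(org.apache.bigtop.itest.hdfstests.TestDFSAdmin)
    result.failures.each { println it }
    println "Tests run: ${result.runCount}, failures: ${result.failureCount}"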