From: cos@apache.org
To: hdfs-commits@hadoop.apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Subject: svn commit: r1204781 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java
Date: Tue, 22 Nov 2011 02:15:50 -0000
Message-Id: <20111122021550.729562388B43@eris.apache.org>

Author: cos
Date: Tue Nov 22 02:15:49 2011
New Revision: 1204781

URL: http://svn.apache.org/viewvc?rev=1204781&view=rev
Log:
HDFS-2573. TestFiDataXceiverServer is failing, not testing OOME. Contributed by Konstantin Boudnik.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1204781&r1=1204780&r2=1204781&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Nov 22 02:15:49 2011
@@ -1950,6 +1950,8 @@ Release 0.22.0 - Unreleased
     HDFS-2514. Link resolution bug for intermediate symlinks with relative
     targets. (eli)
 
+    HDFS-2573. TestFiDataXceiverServer is failing, not testing OOME (cos)
+
 Release 0.21.1 - Unreleased
 
     HDFS-1466. TestFcHdfsSymlink relies on /tmp/test not existing. (eli)

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj?rev=1204781&r1=1204780&r2=1204781&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj Tue Nov 22 02:15:49 2011
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs.server.datanode;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * This aspect takes care about faults injected into datanode.DataXceiver
- * class
- */
-privileged public aspect DataXceiverAspects {
-  public static final Log LOG = LogFactory.getLog(DataXceiverAspects.class);
-
-  pointcut runXceiverThread(DataXceiver xceiver) :
-    execution (* run(..)) && target(xceiver);
-
-  void around (DataXceiver xceiver) : runXceiverThread(xceiver) {
-    if ("true".equals(System.getProperty("fi.enabledOOM"))) {
-      LOG.info("fi.enabledOOM is enabled");
-      throw new OutOfMemoryError("Pretend there's no more memory");
-    } else {
-      proceed(xceiver);
-    }
-  }
-}
\ No newline at end of file

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java?rev=1204781&r1=1204780&r2=1204781&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataXceiverServer.java Tue Nov 22 02:15:49 2011
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs.server.datanode;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketAddress;
-import java.util.concurrent.CountDownLatch;
-
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-/**
- * This is a test for DataXceiverServer when DataXceiver thread spawning is
- * failed due to OutOfMemoryError. Expected behavior is that DataXceiverServer
- * should not be exited. It should retry again after 30 seconds
- */
-public class TestFiDataXceiverServer {
-
-  @Test(timeout = 30000)
-  public void testOutOfMemoryErrorInDataXceiverServerRun() throws Exception {
-    final CountDownLatch latch = new CountDownLatch(1);
-    ServerSocket sock = new ServerSocket() {
-      @Override
-      public Socket accept() throws IOException {
-        return new Socket() {
-          @Override
-          public InetAddress getInetAddress() {
-            return super.getLocalAddress();
-          }
-
-          @Override
-          public SocketAddress getRemoteSocketAddress() {
-            return new InetSocketAddress(8080);
-          }
-
-          @Override
-          public SocketAddress getLocalSocketAddress() {
-            return new InetSocketAddress(0);
-          }
-
-          @Override
-          public synchronized void close() throws IOException {
-            latch.countDown();
-            super.close();
-          }
-
-          @Override
-          public InputStream getInputStream() throws IOException {
-            return null;
-          }
-        };
-      }
-    };
-    Thread thread = null;
-    System.setProperty("fi.enabledOOM", "true");
-    DataNode dn = Mockito.mock(DataNode.class);
-    try {
-      Configuration conf = new Configuration();
-      Mockito.doReturn(conf).when(dn).getConf();
-      dn.shouldRun = true;
-      DataXceiverServer server = new DataXceiverServer(sock, conf, dn);
-      thread = new Thread(server);
-      thread.start();
-      latch.await();
-      assertTrue("Not running the thread", thread.isAlive());
-    } finally {
-      System.setProperty("fi.enabledOOM", "false");
-      dn.shouldRun = false;
-      if (null != thread)
-        thread.interrupt();
-      sock.close();
-    }
-  }
-}
\ No newline at end of file
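
For context, the removed aspect forced an OutOfMemoryError whenever the fi.enabledOOM system
property was set, and the removed test expected the DataXceiverServer accept loop to survive
that error rather than exit, retrying after roughly 30 seconds. Below is a minimal sketch of
that "survive OOME and retry" pattern; it is an illustration of the behavior under test, not
the actual DataXceiverServer source, and the names AcceptLoopSketch and handle are invented
for the example.

    // Illustrative sketch only: simplified accept loop that survives OutOfMemoryError,
    // not the real org.apache.hadoop.hdfs.server.datanode.DataXceiverServer code.
    import java.io.IOException;
    import java.net.ServerSocket;
    import java.net.Socket;

    class AcceptLoopSketch implements Runnable {
      private final ServerSocket serverSocket;
      volatile boolean shouldRun = true;

      AcceptLoopSketch(ServerSocket serverSocket) {
        this.serverSocket = serverSocket;
      }

      @Override
      public void run() {
        while (shouldRun) {
          try {
            Socket s = serverSocket.accept();
            // Hand the connection off to a worker thread (a DataXceiver in HDFS).
            new Thread(() -> handle(s)).start();
          } catch (IOException ie) {
            // Transient accept failures: keep accepting.
          } catch (OutOfMemoryError oom) {
            // The behavior under test: do not let the server thread die.
            // Back off (30 seconds in the HDFS case) and try again.
            try {
              Thread.sleep(30 * 1000L);
            } catch (InterruptedException ie) {
              Thread.currentThread().interrupt();
              shouldRun = false;
            }
          }
        }
      }

      private void handle(Socket s) {
        // Worker logic elided for the sketch.
      }
    }

In the removed test, the overridden ServerSocket.accept() and the fi.enabledOOM property were
the hooks that forced this error path, and the final assertion checked that the server thread
was still alive afterwards.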