From: tomwhite@apache.org
To: whirr-commits@incubator.apache.org
Reply-To: whirr-dev@incubator.apache.org
Subject: svn commit: r1004698 - in /incubator/whirr/trunk: CHANGES.txt services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java
Date: Tue, 05 Oct 2010 16:22:48 -0000
Message-Id: <20101005162248.50458238890D@eris.apache.org>

Author: tomwhite
Date: Tue Oct 5 16:22:47 2010
New Revision: 1004698

URL: http://svn.apache.org/viewvc?rev=1004698&view=rev
Log:
WHIRR-110. Create client-side Hadoop configuration file during cluster launch.

Modified:
    incubator/whirr/trunk/CHANGES.txt
    incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java
    incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java

Modified: incubator/whirr/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/CHANGES.txt?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/CHANGES.txt (original)
+++ incubator/whirr/trunk/CHANGES.txt Tue Oct 5 16:22:47 2010
@@ -23,6 +23,9 @@ Trunk (unreleased changes)
     WHIRR-109. Unit tests fail if there is no private key found at
     ~/.ssh/id_rsa. (Adrian Cole via tomwhite)
 
+    WHIRR-110. Create client-side Hadoop configuration file during cluster
+    launch. (tomwhite)
+
   BUG FIXES
 
     WHIRR-93. Fail on checkstyle violation. (tomwhite)
Modified: incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java (original)
+++ incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java Tue Oct 5 16:22:47 2010
@@ -41,10 +41,8 @@ public class HadoopProxy {
     this.clusterSpec = clusterSpec;
     this.cluster = cluster;
   }
-
-  public void start() throws IOException {
-    // jsch doesn't support SOCKS-based dynamic port forwarding, so we need to shell out...
-    // TODO: Use static port forwarding instead?
+
+  public String[] getProxyCommand() throws IOException {
     checkState(clusterSpec.getPrivateKey() != null, "privateKey is needed");
     File identity;
     if (clusterSpec.getPrivateKey().getRawContent() instanceof File) {
@@ -56,14 +54,21 @@ public class HadoopProxy {
     }
     String user = Iterables.get(cluster.getInstances(), 0).getLoginCredentials().identity;
     String server = cluster.getNamenodePublicAddress().getHostName();
-    String[] command = new String[] { "ssh",
-        "-i", identity.getAbsolutePath(),
-        "-o", "ConnectTimeout=10",
-        "-o", "ServerAliveInterval=60",
-        "-o", "StrictHostKeyChecking=no",
-        "-N",
-        "-D 6666",
-        String.format("%s@%s", user, server)};
+    return new String[] { "ssh",
+        "-i", identity.getAbsolutePath(),
+        "-o", "ConnectTimeout=10",
+        "-o", "ServerAliveInterval=60",
+        "-o", "StrictHostKeyChecking=no",
+        "-N",
+        "-D 6666",
+        String.format("%s@%s", user, server)};
+  }
+
+  public void start() throws IOException {
+    // jsch doesn't support SOCKS-based dynamic port forwarding,
+    // so we need to shell out.
+
+    String[] command = getProxyCommand();
     ProcessBuilder processBuilder = new ProcessBuilder(command);
     process = processBuilder.start();
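
For reference, getProxyCommand() above assembles the argument list for a
plain ssh process that opens a local SOCKS proxy on port 6666. With
hypothetical placeholder values for the identity file, login user and
namenode host (none of these appear in the commit itself), the resulting
command is equivalent to:

  ssh -i /home/alice/.ssh/id_rsa \
      -o ConnectTimeout=10 \
      -o ServerAliveInterval=60 \
      -o StrictHostKeyChecking=no \
      -N -D 6666 \
      alice@ec2-184-73-1-1.compute-1.amazonaws.com

Here -N stops ssh from running a remote command and -D 6666 requests
dynamic (SOCKS) port forwarding, so the process acts purely as a tunnel
to the cluster.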
Modified: incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java (original)
+++ incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java Tue Oct 5 16:22:47 2010
@@ -26,6 +26,7 @@ import static org.jclouds.io.Payloads.ne
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Function;
+import com.google.common.base.Joiner;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
@@ -180,7 +181,10 @@ public class HadoopService extends Servi
     LOG.info("Web UI available at http://{}", namenodePublicAddress.getHostName());
     Properties config = createClientSideProperties(namenodePublicAddress, jobtrackerPublicAddress);
 
-    return new HadoopCluster(instances, config);
+    createClientSideHadoopSiteFile(clusterSpec, config);
+    HadoopCluster cluster = new HadoopCluster(instances, config);
+    createProxyScript(clusterSpec, cluster);
+    return cluster;
   }
 
   private Set<Instance> getInstances(final Set<String> roles, Set<? extends NodeMetadata> nodes) {
@@ -209,9 +213,24 @@ public class HadoopService extends Servi
     return config;
   }
 
-  private void createClientSideHadoopSiteFile(InetAddress namenode, InetAddress jobtracker) throws IOException {
-    File file = new File("/tmp/hadoop-site.xml");
-    Files.write(generateHadoopConfigurationFile(createClientSideProperties(namenode, jobtracker)), file, Charsets.UTF_8);
+  private void createClientSideHadoopSiteFile(ClusterSpec clusterSpec, Properties config) {
+    File configDir = getConfigDir(clusterSpec);
+    File hadoopSiteFile = new File(configDir, "hadoop-site.xml");
+    try {
+      Files.write(generateHadoopConfigurationFile(config), hadoopSiteFile,
+          Charsets.UTF_8);
+      LOG.info("Wrote Hadoop site file {}", hadoopSiteFile);
+    } catch (IOException e) {
+      LOG.error("Problem writing Hadoop site file {}", hadoopSiteFile, e);
+    }
+  }
+
+  private File getConfigDir(ClusterSpec clusterSpec) {
+    File configDir = new File(new File(System.getProperty("user.home")),
+        ".whirr");
+    configDir = new File(configDir, clusterSpec.getClusterName());
+    configDir.mkdirs();
+    return configDir;
   }
 
   private CharSequence generateHadoopConfigurationFile(Properties config) {
@@ -220,13 +239,29 @@ public class HadoopService extends Servi
     sb.append("<?xml version=\"1.0\"?>\n");
     sb.append("<configuration>\n");
     for (Entry<Object, Object> entry : config.entrySet()) {
-      sb.append("<property>\n");
-      sb.append("<name>").append(entry.getKey()).append("</name>\n");
-      sb.append("<value>").append(entry.getValue()).append("</value>\n");
-      sb.append("</property>\n");
+      sb.append("  <property>\n");
+      sb.append("    <name>").append(entry.getKey()).append("</name>\n");
+      sb.append("    <value>").append(entry.getValue()).append("</value>\n");
+      sb.append("  </property>\n");
     }
     sb.append("</configuration>\n");
     return sb;
   }
 
+  private void createProxyScript(ClusterSpec clusterSpec, HadoopCluster cluster) {
+    File configDir = getConfigDir(clusterSpec);
+    File hadoopProxyFile = new File(configDir, "hadoop-proxy.sh");
+    try {
+      HadoopProxy proxy = new HadoopProxy(clusterSpec, cluster);
+      String script = String.format("echo 'Running proxy to Hadoop cluster at %s. " +
+          "Use Ctrl-c to quit.'\n",
+          cluster.getNamenodePublicAddress().getHostName())
+          + Joiner.on(" ").join(proxy.getProxyCommand());
+      Files.write(script, hadoopProxyFile, Charsets.UTF_8);
+      LOG.info("Wrote Hadoop proxy script {}", hadoopProxyFile);
+    } catch (IOException e) {
+      LOG.error("Problem writing Hadoop proxy script {}", hadoopProxyFile, e);
+    }
+  }
+
 }
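
For reference, generateHadoopConfigurationFile() renders the client-side
properties as a hadoop-site.xml file under ~/.whirr/<clustername>/.
Assuming the properties include the standard fs.default.name and
mapred.job.tracker client settings (illustrative names and values only;
createClientSideProperties() is not part of this diff), the generated
file would look like:

  <?xml version="1.0"?>
  <configuration>
    <property>
      <name>fs.default.name</name>
      <value>hdfs://ec2-184-73-1-1.compute-1.amazonaws.com:8020/</value>
    </property>
    <property>
      <name>mapred.job.tracker</name>
      <value>ec2-184-73-1-1.compute-1.amazonaws.com:8021</value>
    </property>
  </configuration>

A client can use it by pointing HADOOP_CONF_DIR at that directory. The
companion hadoop-proxy.sh written by createProxyScript() is a two-line
script: an echo banner followed by the ssh command from
HadoopProxy.getProxyCommand(), joined with spaces.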