whirr-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r1004698 - in /incubator/whirr/trunk: CHANGES.txt services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java
Date Tue, 05 Oct 2010 16:22:48 GMT
Author: tomwhite
Date: Tue Oct  5 16:22:47 2010
New Revision: 1004698

URL: http://svn.apache.org/viewvc?rev=1004698&view=rev
Log:
WHIRR-110. Create client-side Hadoop configuration file during cluster launch.

Modified:
    incubator/whirr/trunk/CHANGES.txt
    incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java
    incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java

Modified: incubator/whirr/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/CHANGES.txt?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/CHANGES.txt (original)
+++ incubator/whirr/trunk/CHANGES.txt Tue Oct  5 16:22:47 2010
@@ -23,6 +23,9 @@ Trunk (unreleased changes)
     WHIRR-109. Unit tests fail if there is no private key found at
     ~/.ssh/id_rsa. (Adrian Cole via tomwhite)
 
+    WHIRR-110. Create client-side Hadoop configuration file during cluster
+    launch. (tomwhite)
+
   BUG FIXES
 
     WHIRR-93. Fail on checkstyle violation. (tomwhite)

Modified: incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java (original)
+++ incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopProxy.java Tue Oct  5 16:22:47 2010
@@ -41,10 +41,8 @@ public class HadoopProxy {
     this.clusterSpec = clusterSpec;
     this.cluster = cluster;
   }
-
-  public void start() throws IOException {
-    // jsch doesn't support SOCKS-based dynamic port forwarding, so we need to shell out...
-    // TODO: Use static port forwarding instead?
+  
+  public String[] getProxyCommand() throws IOException {
     checkState(clusterSpec.getPrivateKey() != null, "privateKey is needed");
     File identity;
     if (clusterSpec.getPrivateKey().getRawContent() instanceof File) {
@@ -56,14 +54,21 @@ public class HadoopProxy {
     }
     String user = Iterables.get(cluster.getInstances(), 0).getLoginCredentials().identity;
     String server = cluster.getNamenodePublicAddress().getHostName();
-    String[] command = new String[] { "ssh",
-      "-i", identity.getAbsolutePath(),
-      "-o", "ConnectTimeout=10",
-      "-o", "ServerAliveInterval=60",
-      "-o", "StrictHostKeyChecking=no",
-      "-N",
-      "-D 6666",
-      String.format("%s@%s", user, server)};
+    return new String[] { "ssh",
+        "-i", identity.getAbsolutePath(),
+        "-o", "ConnectTimeout=10",
+        "-o", "ServerAliveInterval=60",
+        "-o", "StrictHostKeyChecking=no",
+        "-N",
+        "-D 6666",
+        String.format("%s@%s", user, server)};
+  }
+
+  public void start() throws IOException {
+    // jsch doesn't support SOCKS-based dynamic port forwarding
+    // so we need to shell out
+
+    String[] command = getProxyCommand();
     ProcessBuilder processBuilder = new ProcessBuilder(command);
     process = processBuilder.start();
     

Modified: incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java
URL: http://svn.apache.org/viewvc/incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java?rev=1004698&r1=1004697&r2=1004698&view=diff
==============================================================================
--- incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java (original)
+++ incubator/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopService.java Tue Oct  5 16:22:47 2010
@@ -26,6 +26,7 @@ import static org.jclouds.io.Payloads.ne
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Function;
+import com.google.common.base.Joiner;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
@@ -180,7 +181,10 @@ public class HadoopService extends Servi
     LOG.info("Web UI available at http://{}",
         namenodePublicAddress.getHostName());
     Properties config = createClientSideProperties(namenodePublicAddress, jobtrackerPublicAddress);
-    return new HadoopCluster(instances, config);
+    createClientSideHadoopSiteFile(clusterSpec, config);
+    HadoopCluster cluster = new HadoopCluster(instances, config);
+    createProxyScript(clusterSpec, cluster);
+    return cluster; 
   }
   
   private Set<Instance> getInstances(final Set<String> roles, Set<? extends NodeMetadata> nodes) {
@@ -209,9 +213,24 @@ public class HadoopService extends Servi
       return config;
   }
 
-  private void createClientSideHadoopSiteFile(InetAddress namenode, InetAddress jobtracker) throws IOException {
-    File file = new File("/tmp/hadoop-site.xml");
-    Files.write(generateHadoopConfigurationFile(createClientSideProperties(namenode, jobtracker)), file, Charsets.UTF_8);
+  private void createClientSideHadoopSiteFile(ClusterSpec clusterSpec, Properties config) {
+    File configDir = getConfigDir(clusterSpec);
+    File hadoopSiteFile = new File(configDir, "hadoop-site.xml");
+    try {
+      Files.write(generateHadoopConfigurationFile(config), hadoopSiteFile,
+          Charsets.UTF_8);
+      LOG.info("Wrote Hadoop site file {}", hadoopSiteFile);
+    } catch (IOException e) {
+      LOG.error("Problem writing Hadoop site file {}", hadoopSiteFile, e);
+    }
+  }
+  
+  private File getConfigDir(ClusterSpec clusterSpec) {
+    File configDir = new File(new File(System.getProperty("user.home")),
+        ".whirr");
+    configDir = new File(configDir, clusterSpec.getClusterName());
+    configDir.mkdirs();
+    return configDir;
   }
   
   private CharSequence generateHadoopConfigurationFile(Properties config) {
@@ -220,13 +239,29 @@ public class HadoopService extends Servi
     sb.append("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>\n");
     sb.append("<configuration>\n");
     for (Entry<Object, Object> entry : config.entrySet()) {
-      sb.append("<property>\n");
-      sb.append("<name>").append(entry.getKey()).append("</name>\n");
-      sb.append("<value>").append(entry.getValue()).append("</value>\n");
-      sb.append("</property>\n");
+      sb.append("  <property>\n");
+      sb.append("    <name>").append(entry.getKey()).append("</name>\n");
+      sb.append("    <value>").append(entry.getValue()).append("</value>\n");
+      sb.append("  </property>\n");
     }
     sb.append("</configuration>\n");
     return sb;
   }
   
+  private void createProxyScript(ClusterSpec clusterSpec, HadoopCluster cluster) {
+    File configDir = getConfigDir(clusterSpec);
+    File hadoopProxyFile = new File(configDir, "hadoop-proxy.sh");
+    try {
+      HadoopProxy proxy = new HadoopProxy(clusterSpec, cluster);
+      String script = String.format("echo 'Running proxy to Hadoop cluster at %s. " +
+          "Use Ctrl-c to quit.'\n",
+          cluster.getNamenodePublicAddress().getHostName())
+        + Joiner.on(" ").join(proxy.getProxyCommand());
+      Files.write(script, hadoopProxyFile, Charsets.UTF_8);
+      LOG.info("Wrote Hadoop proxy script {}", hadoopProxyFile);
+    } catch (IOException e) {
+      LOG.error("Problem writing Hadoop proxy script {}", hadoopProxyFile, e);
+    }
+  }
+  
 }



Mime
View raw message