hadoop-common-commits mailing list archives

From jh...@apache.org
Subject [38/50] hadoop git commit: YARN-6365. Get static SLS html resources from classpath. Contributed by Yufei Gu.
Date Thu, 20 Apr 2017 21:53:56 GMT
YARN-6365. Get static SLS html resources from classpath. Contributed by Yufei Gu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7e075a50
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7e075a50
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7e075a50

Branch: refs/heads/YARN-5734
Commit: 7e075a50e37a24399bdae413349762f556293da3
Parents: 41ac190
Author: Andrew Wang <wang@apache.org>
Authored: Wed Apr 19 11:08:59 2017 -0700
Committer: Andrew Wang <wang@apache.org>
Committed: Wed Apr 19 11:08:59 2017 -0700

----------------------------------------------------------------------
 hadoop-tools/hadoop-sls/pom.xml                 | 16 ++++++++++
 hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh  | 10 ++----
 .../apache/hadoop/yarn/sls/web/SLSWebApp.java   | 33 ++++++++++----------
 .../src/site/markdown/SchedulerLoadSimulator.md |  4 +--
 4 files changed, 35 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e075a50/hadoop-tools/hadoop-sls/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/pom.xml b/hadoop-tools/hadoop-sls/pom.xml
index eb40e49..0d4ef58 100644
--- a/hadoop-tools/hadoop-sls/pom.xml
+++ b/hadoop-tools/hadoop-sls/pom.xml
@@ -76,6 +76,22 @@
   </dependencies>
 
   <build>
+    <resources>
+      <resource>
+      <directory>src/main/</directory>
+      <includes>
+        <include>html/simulate.html.template</include>
+        <include>html/simulate.info.html.template</include>
+        <include>html/track.html.template</include>
+        <include>html/css/bootstrap-responsive.min.css</include>
+        <include>html/css/bootstrap.min.css</include>
+        <include>html/js/thirdparty/bootstrap.min.js</include>
+        <include>html/js/thirdparty/d3.v3.js</include>
+        <include>html/js/thirdparty/d3-LICENSE</include>
+        <include>html/js/thirdparty/jquery.js</include>
+      </includes>
+      </resource>
+    </resources>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>

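For context (not part of this commit): the <resources> block above packages the listed files into the hadoop-sls artifact under an html/ prefix, so they resolve through the classloader rather than a filesystem path. A minimal, illustrative sketch of that lookup, assuming the hadoop-sls jar or its target/classes directory is on the classpath (the class name here is made up):

    import java.io.InputStream;

    public class SlsHtmlResourceCheck {
      public static void main(String[] args) throws Exception {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        // <directory> is src/main/, so the html/ prefix is part of the resource name.
        try (InputStream in = cl.getResourceAsStream("html/simulate.html.template")) {
          System.out.println(in != null
              ? "html/simulate.html.template found on the classpath"
              : "html/simulate.html.template not found -- is hadoop-sls on the classpath?");
        }
      }
    }

Because the resource root is src/main/, SLSWebApp below asks for "html/simulate.html.template" rather than "simulate.html.template".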
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e075a50/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
index fb53045..5f8d9fc 100644
--- a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
+++ b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
@@ -71,8 +71,6 @@ function parse_args()
 function calculate_classpath
 {
   hadoop_add_to_classpath_tools hadoop-sls
-  hadoop_debug "Injecting ${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH"
-  hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html"
 }
 
 function run_simulation() {
@@ -103,16 +101,12 @@ function run_simulation() {
   hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
-
-# copy 'html' directory to current directory to make sure web sever can access
-cp -r "${bin}/../html" "$(pwd)"
-
 # let's locate libexec...
 if [[ -n "${HADOOP_HOME}" ]]; then
   HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
 fi
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e075a50/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
index 2d2ffc5..29bbe1a 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.yarn.sls.web;
 
-import java.io.File;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.text.MessageFormat;
@@ -31,7 +30,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
@@ -39,17 +38,17 @@ import org.apache.hadoop.yarn.sls.SLSRunner;
 import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
+
 import org.eclipse.jetty.http.MimeTypes;
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.server.Server;
-
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import org.eclipse.jetty.server.handler.ResourceHandler;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.MetricRegistry;
-import org.eclipse.jetty.server.handler.AbstractHandler;
-import org.eclipse.jetty.server.handler.ResourceHandler;
 
 @Private
 @Unstable
@@ -86,12 +85,12 @@ public class SLSWebApp extends HttpServlet {
     // load templates
     ClassLoader cl = Thread.currentThread().getContextClassLoader();
     try {
-      simulateInfoTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("simulate.info.html.template").getFile()));
-      simulateTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("simulate.html.template").getFile()));
-      trackTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("track.html.template").getFile()));
+      simulateInfoTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.info.html.template"));
+      simulateTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.html.template"));
+      trackTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/track.html.template"));
     } catch (IOException e) {
       e.printStackTrace();
     }
@@ -107,20 +106,20 @@ public class SLSWebApp extends HttpServlet {
 
   public SLSWebApp(SchedulerWrapper wrapper, int metricsAddressPort) {
     this.wrapper = wrapper;
-    handleOperTimecostHistogramMap =
-            new HashMap<SchedulerEventType, Histogram>();
-    queueAllocatedMemoryCounterMap = new HashMap<String, Counter>();
-    queueAllocatedVCoresCounterMap = new HashMap<String, Counter>();
+    handleOperTimecostHistogramMap = new HashMap<>();
+    queueAllocatedMemoryCounterMap = new HashMap<>();
+    queueAllocatedVCoresCounterMap = new HashMap<>();
     schedulerMetrics = wrapper.getSchedulerMetrics();
     metrics = schedulerMetrics.getMetrics();
     port = metricsAddressPort;
   }
 
   public void start() throws Exception {
-    // static files
     final ResourceHandler staticHandler = new ResourceHandler();
     staticHandler.setMimeTypes(new MimeTypes());
-    staticHandler.setResourceBase("html");
+    String webRootDir = getClass().getClassLoader().getResource("html").
+        toExternalForm();
+    staticHandler.setResourceBase(webRootDir);
 
     Handler handler = new AbstractHandler() {
       @Override

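For context (not part of this commit): the start() change above derives the ResourceHandler base from a classloader URL, so static files are served straight out of the hadoop-sls jar (or target/classes) instead of a copied html/ directory. A standalone, illustrative sketch of the same pattern, assuming Jetty is on the classpath (the class name and port are made up):

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.ResourceHandler;

    public class ClasspathStaticServer {
      public static void main(String[] args) throws Exception {
        // Resolve the bundled html/ directory to a jar: or file: URL via the classloader.
        String webRootDir = ClasspathStaticServer.class.getClassLoader()
            .getResource("html").toExternalForm();

        ResourceHandler staticHandler = new ResourceHandler();
        staticHandler.setResourceBase(webRootDir);

        Server server = new Server(8080);  // port chosen for illustration only
        server.setHandler(staticHandler);
        server.start();
        server.join();
      }
    }

The template loading earlier in the file follows the same idea: IOUtils.toString over cl.getResourceAsStream("html/...") works whether the templates sit inside a jar or in an exploded classes directory, whereas the old FileUtils.readFileToString(new File(...getFile())) only worked for files on disk.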
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e075a50/hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md b/hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
index 2cffc86..dfd872c 100644
--- a/hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
+++ b/hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
@@ -97,7 +97,7 @@ This section will show how to use the simulator. Here let `$HADOOP_ROOT` represe
 
 *   `bin`: contains running scripts for the simulator.
 
-*   `html`: contains several html/css/js files we needed for real-time tracking.
+*   `html`: Users can also reproduce those real-time tracking charts in offline mode. Just upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. For browser security problem, need to put files `realtimetrack.json` and `showSimulationTrace.html` in the same directory.
 
 *   `sample-conf`: specifies the simulator configurations.
 
@@ -279,8 +279,6 @@ After the simulator finishes, all logs are saved in the output directory specifi
 
 *   Folder `metrics`: logs generated by the Metrics.
 
-Users can also reproduce those real-time tracking charts in offline mode. Just upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. For browser security problem, need to put files `realtimetrack.json` and `showSimulationTrace.html` in the same directory.
-
 Appendix
 --------
 


