hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vino...@apache.org
Subject svn commit: r1213975 [4/6] - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/ hadoop-mapreduce-client/hadoop-mapre...
Date Tue, 13 Dec 2011 23:05:59 GMT
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java Tue Dec 13 23:05:56 2011
@@ -30,9 +30,10 @@ import org.apache.hadoop.yarn.server.nod
 import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
 import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.service.AbstractService;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 
 public class WebServer extends AbstractService {
 
@@ -61,8 +62,9 @@ public class WebServer extends AbstractS
         YarnConfiguration.DEFAULT_NM_WEBAPP_ADDRESS);
     LOG.info("Instantiating NMWebApp at " + bindAddress);
     try {
-      this.webApp = WebApps.$for("node", Context.class, this.nmContext).at(
-          bindAddress).with(getConfig()).start(this.nmWebApp);
+      this.webApp =
+          WebApps.$for("node", Context.class, this.nmContext, "ws")
+              .at(bindAddress).with(getConfig()).start(this.nmWebApp);
     } catch (Exception e) {
       String msg = "NMWebapps failed to start.";
       LOG.error(msg, e);
@@ -95,6 +97,9 @@ public class WebServer extends AbstractS
 
     @Override
     public void setup() {
+      bind(NMWebServices.class);
+      bind(GenericExceptionHandler.class);
+      bind(JAXBContextResolver.class);
       bind(ResourceView.class).toInstance(this.resourceView);
       bind(ApplicationACLsManager.class).toInstance(this.aclsManager);
       bind(LocalDirsHandlerService.class).toInstance(dirsHandler);

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.nodemanager.webapp.dao;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+
+@XmlRootElement(name = "app")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AppInfo {
+
+  protected String id;
+  protected String state;
+  protected String user;
+  protected ArrayList<String> containerids;
+
+  public AppInfo() {
+  } // JAXB needs this
+
+  public AppInfo(final Application app) {
+    this.id = ConverterUtils.toString(app.getAppId());
+    this.state = app.getApplicationState().toString();
+    this.user = app.getUser();
+
+    this.containerids = new ArrayList<String>();
+    Map<ContainerId, Container> appContainers = app.getContainers();
+    for (ContainerId containerId : appContainers.keySet()) {
+      String containerIdStr = ConverterUtils.toString(containerId);
+      containerids.add(containerIdStr);
+    }
+  }
+
+  public String getId() {
+    return this.id;
+  }
+
+  public String getUser() {
+    return this.user;
+  }
+
+  public String getState() {
+    return this.state;
+  }
+
+  public ArrayList<String> getContainers() {
+    return this.containerids;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.nodemanager.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "apps")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AppsInfo {
+
+  protected ArrayList<AppInfo> app = new ArrayList<AppInfo>();
+
+  public AppsInfo() {
+  } // JAXB needs this
+
+  public void add(AppInfo appInfo) {
+    app.add(appInfo);
+  }
+
+  public ArrayList<AppInfo> getApps() {
+    return app;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainerInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainerInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainerInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainerInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp.dao;
+
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.nodemanager.Context;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+
+@XmlRootElement(name = "container")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ContainerInfo {
+
+  protected String id;
+  protected String state;
+  protected int exitCode;
+  protected String diagnostics;
+  protected String user;
+  protected long totalMemoryNeededMB;
+  protected String containerLogsLink;
+  protected String nodeId;
+  @XmlTransient
+  protected String containerLogsShortLink;
+  @XmlTransient
+  protected String exitStatus;
+
+  public ContainerInfo() {
+  } // JAXB needs this
+
+  public ContainerInfo(final Context nmContext, final Container container) {
+    this(nmContext, container, "", "");
+  }
+
+  public ContainerInfo(final Context nmContext, final Container container,
+      final String requestUri, final String pathPrefix) {
+
+    this.id = container.getContainerID().toString();
+    this.nodeId = nmContext.getNodeId().toString();
+    ContainerStatus containerData = container.cloneAndGetContainerStatus();
+    this.exitCode = containerData.getExitStatus();
+    this.exitStatus = (this.exitCode == YarnConfiguration.INVALID_CONTAINER_EXIT_STATUS) ? "N/A"
+        : String.valueOf(exitCode);
+    this.state = container.getContainerState().toString();
+    this.diagnostics = containerData.getDiagnostics();
+    if (this.diagnostics == null || this.diagnostics.isEmpty()) {
+      this.diagnostics = "";
+    }
+
+    this.user = container.getUser();
+    this.totalMemoryNeededMB = container.getLaunchContext().getResource()
+        .getMemory();
+    this.containerLogsShortLink = ujoin("containerlogs", this.id,
+        container.getUser());
+    this.containerLogsLink = join(requestUri, pathPrefix,
+        this.containerLogsShortLink);
+  }
+
+  public String getId() {
+    return this.id;
+  }
+
+  public String getNodeId() {
+    return this.nodeId;
+  }
+
+  public String getState() {
+    return this.state;
+  }
+
+  public int getExitCode() {
+    return this.exitCode;
+  }
+
+  public String getExitStatus() {
+    return this.exitStatus;
+  }
+
+  public String getDiagnostics() {
+    return this.diagnostics;
+  }
+
+  public String getUser() {
+    return this.user;
+  }
+
+  public String getShortLogLink() {
+    return this.containerLogsShortLink;
+  }
+
+  public String getLogLink() {
+    return this.containerLogsLink;
+  }
+
+  public long getMemoryNeeded() {
+    return this.totalMemoryNeededMB;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainersInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainersInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainersInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainersInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.nodemanager.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "containers")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ContainersInfo {
+
+  protected ArrayList<ContainerInfo> container = new ArrayList<ContainerInfo>();
+
+  public ContainersInfo() {
+  } // JAXB needs this
+
+  public void add(ContainerInfo containerInfo) {
+    container.add(containerInfo);
+  }
+
+  public ArrayList<ContainerInfo> getContainers() {
+    return container;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/NodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/NodeInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/NodeInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/NodeInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.server.nodemanager.Context;
+import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
+import org.apache.hadoop.yarn.util.YarnVersionInfo;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class NodeInfo {
+
+  private static final long BYTES_IN_MB = 1024 * 1024;
+
+  protected String healthReport;
+  protected long totalVmemAllocatedContainersMB;
+  protected long totalPmemAllocatedContainersMB;
+  protected long lastNodeUpdateTime;
+  protected boolean nodeHealthy;
+  protected String nodeManagerVersion;
+  protected String nodeManagerBuildVersion;
+  protected String nodeManagerVersionBuiltOn;
+  protected String hadoopVersion;
+  protected String hadoopBuildVersion;
+  protected String hadoopVersionBuiltOn;
+  protected String id;
+  protected String nodeHostName;
+
+  public NodeInfo() {
+  } // JAXB needs this
+
+  public NodeInfo(final Context context, final ResourceView resourceView) {
+
+    this.id = context.getNodeId().toString();
+    this.nodeHostName = context.getNodeId().getHost();
+    this.totalVmemAllocatedContainersMB = resourceView
+        .getVmemAllocatedForContainers() / BYTES_IN_MB;
+    this.totalPmemAllocatedContainersMB = resourceView
+        .getPmemAllocatedForContainers() / BYTES_IN_MB;
+    this.nodeHealthy = context.getNodeHealthStatus().getIsNodeHealthy();
+    this.lastNodeUpdateTime = context.getNodeHealthStatus()
+        .getLastHealthReportTime();
+
+    this.healthReport = context.getNodeHealthStatus().getHealthReport();
+
+    this.nodeManagerVersion = YarnVersionInfo.getVersion();
+    this.nodeManagerBuildVersion = YarnVersionInfo.getBuildVersion();
+    this.nodeManagerVersionBuiltOn = YarnVersionInfo.getDate();
+    this.hadoopVersion = VersionInfo.getVersion();
+    this.hadoopBuildVersion = VersionInfo.getBuildVersion();
+    this.hadoopVersionBuiltOn = VersionInfo.getDate();
+  }
+
+  public String getNodeId() {
+    return this.id;
+  }
+
+  public String getNodeHostName() {
+    return this.nodeHostName;
+  }
+
+  public String getNMVersion() {
+    return this.nodeManagerVersion;
+  }
+
+  public String getNMBuildVersion() {
+    return this.nodeManagerBuildVersion;
+  }
+
+  public String getNMVersionBuiltOn() {
+    return this.nodeManagerVersionBuiltOn;
+  }
+
+  public String getHadoopVersion() {
+    return this.hadoopVersion;
+  }
+
+  public String getHadoopBuildVersion() {
+    return this.hadoopBuildVersion;
+  }
+
+  public String getHadoopVersionBuiltOn() {
+    return this.hadoopVersionBuiltOn;
+  }
+
+  public boolean getHealthStatus() {
+    return this.nodeHealthy;
+  }
+
+  public long getLastNodeUpdateTime() {
+    return this.lastNodeUpdateTime;
+  }
+
+  public String getHealthReport() {
+    return this.healthReport;
+  }
+
+  public long getTotalVmemAllocated() {
+    return this.totalVmemAllocatedContainersMB;
+  }
+
+  public long getTotalPmemAllocated() {
+    return this.totalPmemAllocatedContainersMB;
+  }
+
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java Tue Dec 13 23:05:56 2011
@@ -412,7 +412,7 @@ public class ResourceManager extends Com
 
   protected void startWepApp() {
     Builder<ApplicationMasterService> builder = 
-      WebApps.$for("cluster", masterService).at(
+      WebApps.$for("cluster", ApplicationMasterService.class, masterService, "ws").at(
           this.conf.get(YarnConfiguration.RM_WEBAPP_ADDRESS,
           YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS)); 
     if(YarnConfiguration.getRMWebAppHostAndPort(conf).

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java Tue Dec 13 23:05:56 2011
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
-import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
 import org.apache.hadoop.yarn.util.Times;
-import org.apache.hadoop.yarn.util.YarnVersionInfo;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
 
@@ -30,25 +29,25 @@ import com.google.inject.Inject;
 public class AboutBlock extends HtmlBlock {
   final ResourceManager rm;
 
-  @Inject 
+  @Inject
   AboutBlock(ResourceManager rm, ViewContext ctx) {
     super(ctx);
     this.rm = rm;
   }
-  
+
   @Override
   protected void render(Block html) {
     html._(MetricsOverviewTable.class);
-    long ts = ResourceManager.clusterTimeStamp;
     ResourceManager rm = getInstance(ResourceManager.class);
+    ClusterInfo cinfo = new ClusterInfo(rm);
     info("Cluster overview").
-      _("Cluster ID:", ts).
-      _("ResourceManager state:", rm.getServiceState()).
-      _("ResourceManager started on:", Times.format(ts)).
-      _("ResourceManager version:", YarnVersionInfo.getBuildVersion() +
-          " on " + YarnVersionInfo.getDate()).
-      _("Hadoop version:", VersionInfo.getBuildVersion() +
-          " on " + VersionInfo.getDate());
+      _("Cluster ID:", cinfo.getClusterId()).
+      _("ResourceManager state:", cinfo.getState()).
+      _("ResourceManager started on:", Times.format(cinfo.getStartedOn())).
+      _("ResourceManager version:", cinfo.getRMBuildVersion() +
+          " on " + cinfo.getRMVersionBuiltOn()).
+      _("Hadoop version:", cinfo.getHadoopBuildVersion() +
+          " on " + cinfo.getHadoopVersionBuiltOn());
     html._(InfoBlock.class);
   }
 

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java Tue Dec 13 23:05:56 2011
@@ -23,6 +23,7 @@ import static org.apache.hadoop.yarn.web
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
 
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -56,23 +57,18 @@ class AppsBlock extends HtmlBlock {
         tbody();
     int i = 0;
     for (RMApp app : list.apps.values()) {
-      String appId = app.getApplicationId().toString();
-      String trackingUrl = app.getTrackingUrl();
-      boolean trackingUrlIsNotReady = trackingUrl == null || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
-	  String ui = trackingUrlIsNotReady ? "UNASSIGNED" :
-          (app.getFinishTime() == 0 ? 
-              "ApplicationMaster" : "History");
-      String percent = String.format("%.1f", app.getProgress() * 100);
+      AppInfo appInfo = new AppInfo(app, true);
+      String percent = String.format("%.1f", appInfo.getProgress());
       tbody.
         tr().
           td().
-            br().$title(String.valueOf(app.getApplicationId().getId()))._(). // for sorting
-            a(url("app", appId), appId)._().
-          td(app.getUser().toString()).
-          td(app.getName().toString()).
-          td(app.getQueue().toString()).
-          td(app.getState().toString()).
-          td(app.getFinalApplicationStatus().toString()).
+            br().$title(appInfo.getAppIdNum())._(). // for sorting
+            a(url("app", appInfo.getAppId()), appInfo.getAppId())._().
+          td(appInfo.getUser()).
+          td(appInfo.getName()).
+          td(appInfo.getQueue()).
+          td(appInfo.getState()).
+          td(appInfo.getFinalStatus()).
           td().
             br().$title(percent)._(). // for sorting
             div(_PROGRESSBAR).
@@ -80,9 +76,9 @@ class AppsBlock extends HtmlBlock {
               div(_PROGRESSBAR_VALUE).
                 $style(join("width:", percent, '%'))._()._()._().
           td().
-            a(trackingUrlIsNotReady ?
-              "#" : join("http://", trackingUrl), ui)._().
-          td(app.getDiagnostics().toString())._();
+            a(!appInfo.isTrackingUrlReady()?
+              "#" : appInfo.getTrackingUrlPretty(), appInfo.getTrackingUI())._().
+          td(appInfo.getNote())._();
       if (list.rendering != Render.HTML && ++i >= 20) break;
     }
     tbody._()._();

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java Tue Dec 13 23:05:56 2011
@@ -31,6 +31,7 @@ import java.util.concurrent.ConcurrentMa
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
 import org.apache.hadoop.yarn.webapp.ToJSON;
 import org.apache.hadoop.yarn.webapp.view.JQueryUI.Render;
@@ -54,31 +55,27 @@ class AppsList implements ToJSON {
     out.append('[');
     boolean first = true;
     for (RMApp app : apps.values()) {
+      AppInfo appInfo = new AppInfo(app, false);
       if (first) {
         first = false;
       } else {
         out.append(",\n");
       }
-      String appID = app.getApplicationId().toString();
-      String trackingUrl = app.getTrackingUrl();
-      boolean trackingUrlIsNotReady = trackingUrl == null
-          || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
-      String ui = trackingUrlIsNotReady ? "UNASSIGNED"
-          : (app.getFinishTime() == 0 ? "ApplicationMaster" : "History");
       out.append("[\"");
-      appendSortable(out, app.getApplicationId().getId());
-      appendLink(out, appID, rc.prefix(), "app", appID).append(_SEP).
-          append(escapeHtml(app.getUser().toString())).append(_SEP).
-          append(escapeHtml(app.getName().toString())).append(_SEP).
-          append(escapeHtml(app.getQueue())).append(_SEP).
-          append(app.getState().toString()).append(_SEP).
-          append(app.getFinalApplicationStatus().toString()).append(_SEP);
-      appendProgressBar(out, app.getProgress()).append(_SEP);
-      appendLink(out, ui, rc.prefix(),
-          trackingUrlIsNotReady ?
-            "#" : "http://", trackingUrl).
+      appendSortable(out, appInfo.getAppIdNum());
+      appendLink(out, appInfo.getAppId(), rc.prefix(), "app",
+          appInfo.getAppId()).append(_SEP).
+          append(escapeHtml(appInfo.getUser())).append(_SEP).
+          append(escapeHtml(appInfo.getName())).append(_SEP).
+          append(escapeHtml(appInfo.getQueue())).append(_SEP).
+          append(appInfo.getState()).append(_SEP).
+          append(appInfo.getFinalStatus()).append(_SEP);
+      appendProgressBar(out, appInfo.getProgress()).append(_SEP);
+      appendLink(out, appInfo.getTrackingUI(), rc.prefix(),
+          !appInfo.isTrackingUrlReady() ?
+            "#" : appInfo.getTrackingUrlPretty()).
           append(_SEP).append(escapeJavaScript(escapeHtml(
-                              app.getDiagnostics().toString()))).
+                              appInfo.getNote()))).
           append("\"]");
     }
     out.append(']');

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java Tue Dec 13 23:05:56 2011
@@ -18,19 +18,23 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
-import com.google.inject.Inject;
-import com.google.inject.servlet.RequestScoped;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
 
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerQueueInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import com.google.inject.Inject;
+import com.google.inject.servlet.RequestScoped;
 
 class CapacitySchedulerPage extends RmView {
   static final String _Q = ".ui-state-default.ui-corner-all";
@@ -47,22 +51,21 @@ class CapacitySchedulerPage extends RmVi
 
   public static class QueueBlock extends HtmlBlock {
     final Parent parent;
+    final CapacitySchedulerInfo sinfo;
 
     @Inject QueueBlock(Parent parent) {
       this.parent = parent;
+      sinfo = new CapacitySchedulerInfo(parent.queue);
     }
 
     @Override
     public void render(Block html) {
       UL<Hamlet> ul = html.ul();
-      CSQueue parentQueue = parent.queue;
-      for (CSQueue queue : parentQueue.getChildQueues()) {
-        float used = queue.getUsedCapacity();
-        float set = queue.getCapacity();
+      for (CapacitySchedulerQueueInfo info : sinfo.getSubQueues()) {
+        float used = info.getUsedCapacity() / 100;
+        float set = info.getCapacity() / 100;
         float delta = Math.abs(set - used) + 0.001f;
-        float max = queue.getMaximumCapacity();
-        if (max < EPSILON || max > 1f) max = 1f;
-        //String absMaxPct = percent(queue.getAbsoluteMaximumCapacity());
+        float max = info.getMaxCapacity() / 100;
         LI<UL<Hamlet>> li = ul.
           li().
             a(_Q).$style(width(max * WIDTH_F)).
@@ -72,14 +75,16 @@ class CapacitySchedulerPage extends RmVi
               span().$style(join(width(delta/max), ';',
                 used > set ? OVER : UNDER, ';',
                 used > set ? left(set/max) : left(used/max)))._('.')._().
-              span(".q", queue.getQueuePath().substring(5))._();
-        if (queue instanceof ParentQueue) {
-          parent.queue = queue;
+              span(".q", info.getQueuePath().substring(5))._();
+        if (info.getQueue() instanceof ParentQueue) {
+          // TODO: recursing by mutating the shared parent.queue field could be optimized
+          parent.queue = info.getQueue();
           li.
             _(QueueBlock.class);
         }
         li._();
       }
+
       ul._();
     }
   }
@@ -111,8 +116,9 @@ class CapacitySchedulerPage extends RmVi
       } else {
         CSQueue root = cs.getRootQueue();
         parent.queue = root;
-        float used = root.getUsedCapacity();
-        float set = root.getCapacity();
+        CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(parent.queue);
+        float used = sinfo.getUsedCapacity() / 100;
+        float set = sinfo.getCapacity() / 100;
         float delta = Math.abs(set - used) + 0.001f;
         ul.
           li().

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java Tue Dec 13 23:05:56 2011
@@ -18,22 +18,20 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
 
+import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.yarn.api.records.QueueInfo;
-import org.apache.hadoop.yarn.api.records.QueueState;
-import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
-import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
 
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import com.google.inject.Inject;
 
 class DefaultSchedulerPage extends RmView {
   static final String _Q = ".ui-state-default.ui-corner-all";
@@ -44,66 +42,35 @@ class DefaultSchedulerPage extends RmVie
   static final float EPSILON = 1e-8f;
 
   static class QueueInfoBlock extends HtmlBlock {
-    final RMContext rmContext;
-    final FifoScheduler fs;
-    final String qName;
-    final QueueInfo qInfo;
+    final FifoSchedulerInfo sinfo;
 
     @Inject QueueInfoBlock(RMContext context, ViewContext ctx, ResourceManager rm) {
       super(ctx);
-      this.rmContext = context;
-
-      fs = (FifoScheduler) rm.getResourceScheduler();
-      qName = fs.getQueueInfo("",false,false).getQueueName();
-      qInfo = fs.getQueueInfo(qName,true,true);
+      sinfo = new FifoSchedulerInfo(rm);
     }
 
     @Override public void render(Block html) {
-      String minmemoryresource = 
-                Integer.toString(fs.getMinimumResourceCapability().getMemory());
-      String maxmemoryresource = 
-                Integer.toString(fs.getMaximumResourceCapability().getMemory());
-      String qstate = (qInfo.getQueueState() == QueueState.RUNNING) ?
-                       "Running" :
-                           (qInfo.getQueueState() == QueueState.STOPPED) ?
-                                  "Stopped" : "Unknown";
-
-      int usedNodeMem      = 0;
-      int availNodeMem     = 0;
-      int totNodeMem       = 0;
-      int nodeContainers   = 0;
-
-      for (RMNode ni : this.rmContext.getRMNodes().values()) {
-        SchedulerNodeReport report = fs.getNodeReport(ni.getNodeID());
-        usedNodeMem += report.getUsedResource().getMemory();
-        availNodeMem += report.getAvailableResource().getMemory();
-        totNodeMem += ni.getTotalCapability().getMemory();
-        nodeContainers += fs.getNodeReport(ni.getNodeID()).getNumContainers();
-      }
-
-      info("\'" + qName + "\' Queue Status").
-        _("Queue State:" , qstate).
-        _("Minimum Queue Memory Capacity:" , minmemoryresource).
-        _("Maximum Queue Memory Capacity:" , maxmemoryresource).
-        _("Number of Nodes:" , Integer.toString(this.rmContext.getRMNodes().size())).
-        _("Used Node Capacity:" , Integer.toString(usedNodeMem)).
-        _("Available Node Capacity:" , Integer.toString(availNodeMem)).
-        _("Total Node Capacity:" , Integer.toString(totNodeMem)).
-        _("Number of Node Containers:" , Integer.toString(nodeContainers));
+      info("\'" + sinfo.getQueueName() + "\' Queue Status").
+        _("Queue State:" , sinfo.getState()).
+        _("Minimum Queue Memory Capacity:" , Integer.toString(sinfo.getMinQueueMemoryCapacity())).
+        _("Maximum Queue Memory Capacity:" , Integer.toString(sinfo.getMaxQueueMemoryCapacity())).
+        _("Number of Nodes:" , Integer.toString(sinfo.getNumNodes())).
+        _("Used Node Capacity:" , Integer.toString(sinfo.getUsedNodeCapacity())).
+        _("Available Node Capacity:" , Integer.toString(sinfo.getAvailNodeCapacity())).
+        _("Total Node Capacity:" , Integer.toString(sinfo.getTotalNodeCapacity())).
+        _("Number of Node Containers:" , Integer.toString(sinfo.getNumContainers()));
 
       html._(InfoBlock.class);
     }
   }
 
   static class QueuesBlock extends HtmlBlock {
+    final FifoSchedulerInfo sinfo;
     final FifoScheduler fs;
-    final String qName;
-    final QueueInfo qInfo;
 
     @Inject QueuesBlock(ResourceManager rm) {
+      sinfo = new FifoSchedulerInfo(rm);
       fs = (FifoScheduler) rm.getResourceScheduler();
-      qName = fs.getQueueInfo("",false,false).getQueueName();
-      qInfo = fs.getQueueInfo(qName,false,false);
     }
 
     @Override
@@ -123,8 +90,8 @@ class DefaultSchedulerPage extends RmVie
               span().$style(Q_END)._("100% ")._().
               span(".q", "default")._()._();
       } else {
-        float used = qInfo.getCurrentCapacity();
-        float set = qInfo.getCapacity();
+        float used = sinfo.getUsedCapacity();
+        float set = sinfo.getCapacity();
         float delta = Math.abs(set - used) + 0.001f;
         ul.
           li().
@@ -133,7 +100,7 @@ class DefaultSchedulerPage extends RmVie
               span().$style(Q_END)._("100%")._().
               span().$style(join(width(delta), ';', used > set ? OVER : UNDER,
                 ';', used > set ? left(set) : left(used)))._(".")._().
-              span(".q", qName)._().
+              span(".q", sinfo.getQueueName())._().
             _(QueueInfoBlock.class)._();
       }
 

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/JAXBContextResolver.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/JAXBContextResolver.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/JAXBContextResolver.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+
+import com.google.inject.Singleton;
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.api.json.JSONJAXBContext;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.ext.ContextResolver;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerQueueInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodesInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerTypeInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.UserMetricsInfo;
+
+@Singleton
+@Provider
+public class JAXBContextResolver implements ContextResolver<JAXBContext> {
+
+  private JAXBContext context;
+  private final Set<Class> types;
+
+  // Every DAO class serialized through this JAXB context must be listed here.
+  private final Class[] cTypes = { AppInfo.class, ClusterInfo.class,
+      CapacitySchedulerQueueInfo.class, FifoSchedulerInfo.class,
+      SchedulerTypeInfo.class, NodeInfo.class, UserMetricsInfo.class,
+      CapacitySchedulerInfo.class, ClusterMetricsInfo.class,
+      SchedulerInfo.class, AppsInfo.class, NodesInfo.class };
+
+  public JAXBContextResolver() throws Exception {
+    this.types = new HashSet<Class>(Arrays.asList(cTypes));
+    this.context = new JSONJAXBContext(JSONConfiguration.natural()
+        .rootUnwrapping(false).build(), cTypes);
+  }
+
+  @Override
+  public JAXBContext getContext(Class<?> objectType) {
+    return (types.contains(objectType)) ? context : null;
+  }
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java Tue Dec 13 23:05:56 2011
@@ -19,11 +19,11 @@
 package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.UserMetricsInfo;
+
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -36,12 +36,12 @@ import com.google.inject.Inject;
  * current user is using on the cluster.
  */
 public class MetricsOverviewTable extends HtmlBlock {
-  private static final long BYTES_IN_GB = 1024 * 1024 * 1024;
-  
+  private static final long BYTES_IN_MB = 1024 * 1024;
+
   private final RMContext rmContext;
   private final ResourceManager rm;
 
-  @Inject 
+  @Inject
   MetricsOverviewTable(RMContext context, ResourceManager rm, ViewContext ctx) {
     super(ctx);
     this.rmContext = context;
@@ -55,22 +55,7 @@ public class MetricsOverviewTable extend
     //CSS in the correct spot
     html.style(".metrics {margin-bottom:5px}"); 
     
-    ResourceScheduler rs = rm.getResourceScheduler();
-    QueueMetrics metrics = rs.getRootQueueMetrics();
-    ClusterMetrics clusterMetrics = ClusterMetrics.getMetrics();
-    
-    int appsSubmitted = metrics.getAppsSubmitted();
-    int reservedGB = metrics.getReservedGB();
-    int availableGB = metrics.getAvailableGB();
-    int allocatedGB = metrics.getAllocatedGB();
-    int containersAllocated = metrics.getAllocatedContainers();
-    int totalGB = availableGB + reservedGB + allocatedGB;
-
-    int totalNodes = clusterMetrics.getNumNMs();
-    int lostNodes = clusterMetrics.getNumLostNMs();
-    int unhealthyNodes = clusterMetrics.getUnhealthyNMs();
-    int decommissionedNodes = clusterMetrics.getNumDecommisionedNMs();
-    int rebootedNodes = clusterMetrics.getNumRebootedNMs();
+    ClusterMetricsInfo clusterMetrics = new ClusterMetricsInfo(this.rm, this.rmContext);
 
     
     DIV<Hamlet> div = html.div().$class("metrics");
@@ -92,30 +77,23 @@ public class MetricsOverviewTable extend
     _().
     tbody().$class("ui-widget-content").
       tr().
-        td(String.valueOf(appsSubmitted)).
-        td(String.valueOf(containersAllocated)).
-        td(StringUtils.byteDesc(allocatedGB * BYTES_IN_GB)).
-        td(StringUtils.byteDesc(totalGB * BYTES_IN_GB)).
-        td(StringUtils.byteDesc(reservedGB * BYTES_IN_GB)).
-        td().a(url("nodes"),String.valueOf(totalNodes))._(). 
-        td().a(url("nodes/decommissioned"),String.valueOf(decommissionedNodes))._(). 
-        td().a(url("nodes/lost"),String.valueOf(lostNodes))._().
-        td().a(url("nodes/unhealthy"),String.valueOf(unhealthyNodes))._().
-        td().a(url("nodes/rebooted"),String.valueOf(rebootedNodes))._().
+        td(String.valueOf(clusterMetrics.getAppsSubmitted())).
+        td(String.valueOf(clusterMetrics.getContainersAllocated())).
+        td(StringUtils.byteDesc(clusterMetrics.getAllocatedMB() * BYTES_IN_MB)).
+        td(StringUtils.byteDesc(clusterMetrics.getTotalMB() * BYTES_IN_MB)).
+        td(StringUtils.byteDesc(clusterMetrics.getReservedMB() * BYTES_IN_MB)).
+        td().a(url("nodes"),String.valueOf(clusterMetrics.getTotalNodes()))._().
+        td().a(url("nodes/decommissioned"),String.valueOf(clusterMetrics.getDecommissionedNodes()))._().
+        td().a(url("nodes/lost"),String.valueOf(clusterMetrics.getLostNodes()))._().
+        td().a(url("nodes/unhealthy"),String.valueOf(clusterMetrics.getUnhealthyNodes()))._().
+        td().a(url("nodes/rebooted"),String.valueOf(clusterMetrics.getRebootedNodes()))._().
       _().
     _()._();
-    
+
     String user = request().getRemoteUser();
     if (user != null) {
-      QueueMetrics userMetrics = metrics.getUserMetrics(user);
-      if(userMetrics != null) {
-        int myAppsSubmitted = userMetrics.getAppsSubmitted();
-        int myRunningContainers = userMetrics.getAllocatedContainers();
-        int myPendingContainers = userMetrics.getPendingContainers();
-        int myReservedContainers = userMetrics.getReservedContainers();
-        int myReservedGB = userMetrics.getReservedGB();
-        int myPendingGB = userMetrics.getPendingGB();
-        int myAllocatedGB = userMetrics.getAllocatedGB();
+      UserMetricsInfo userMetrics = new UserMetricsInfo(this.rm, this.rmContext, user);
+      if (userMetrics.metricsAvailable()) {
         div.table("#usermetricsoverview").
         thead().$class("ui-widget-header").
           tr().
@@ -130,13 +108,13 @@ public class MetricsOverviewTable extend
         _().
         tbody().$class("ui-widget-content").
           tr().
-            td(String.valueOf(myAppsSubmitted)).
-            td(String.valueOf(myRunningContainers)).
-            td(String.valueOf(myPendingContainers)).
-            td(String.valueOf(myReservedContainers)).
-            td(StringUtils.byteDesc(myAllocatedGB * BYTES_IN_GB)).
-            td(StringUtils.byteDesc(myPendingGB * BYTES_IN_GB)).
-            td(StringUtils.byteDesc(myReservedGB * BYTES_IN_GB)).
+            td(String.valueOf(userMetrics.getAppsSubmitted())).
+            td(String.valueOf(userMetrics.getRunningContainers())).
+            td(String.valueOf(userMetrics.getPendingContainers())).
+            td(String.valueOf(userMetrics.getReservedContainers())).
+            td(StringUtils.byteDesc(userMetrics.getAllocatedMB() * BYTES_IN_MB)).
+            td(StringUtils.byteDesc(userMetrics.getPendingMB() * BYTES_IN_MB)).
+            td(StringUtils.byteDesc(userMetrics.getReservedMB() * BYTES_IN_MB)).
           _().
         _()._();
       }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java Tue Dec 13 23:05:56 2011
@@ -25,14 +25,12 @@ import static org.apache.hadoop.yarn.web
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeState;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
@@ -45,9 +43,9 @@ import com.google.inject.Inject;
 class NodesPage extends RmView {
 
   static class NodesBlock extends HtmlBlock {
-    private static final long BYTES_IN_MB = 1024 * 1024;
     final RMContext rmContext;
     final ResourceManager rm;
+    private static final long BYTES_IN_MB = 1024 * 1024;
 
     @Inject
     NodesBlock(RMContext context, ResourceManager rm, ViewContext ctx) {
@@ -59,7 +57,7 @@ class NodesPage extends RmView {
     @Override
     protected void render(Block html) {
       html._(MetricsOverviewTable.class);
-      
+
       ResourceScheduler sched = rm.getResourceScheduler();
       String type = $(NODE_STATE);
       TBODY<TABLE<Hamlet>> tbody = html.table("#nodes").
@@ -88,27 +86,18 @@ class NodesPage extends RmView {
             continue;
           }
         }
-        NodeId id = ni.getNodeID();
-        SchedulerNodeReport report = sched.getNodeReport(id);
-        int numContainers = 0;
-        int usedMemory = 0;
-        int availableMemory = 0;
-        if(report != null) {
-          numContainers = report.getNumContainers();
-          usedMemory = report.getUsedResource().getMemory();
-          availableMemory = report.getAvailableResource().getMemory();
-        }
-
-        NodeHealthStatus health = ni.getNodeHealthStatus();
+        NodeInfo info = new NodeInfo(ni, sched);
+        int usedMemory = (int)info.getUsedMemory();
+        int availableMemory = (int)info.getAvailableMemory();
         tbody.tr().
-            td(ni.getRackName()).
-            td(String.valueOf(ni.getState())).
-            td(String.valueOf(ni.getNodeID().toString())).
-            td().a("http://" + ni.getHttpAddress(), ni.getHttpAddress())._().
-            td(health.getIsNodeHealthy() ? "Healthy" : "Unhealthy").
-            td(Times.format(health.getLastHealthReportTime())).
-            td(String.valueOf(health.getHealthReport())).
-            td(String.valueOf(numContainers)).
+            td(info.getRack()).
+            td(info.getState()).
+            td(info.getNodeId()).
+            td().a("http://" + info.getNodeHTTPAddress(), info.getNodeHTTPAddress())._().
+            td(info.getHealthStatus()).
+            td(Times.format(info.getLastHealthUpdate())).
+            td(info.getHealthReport()).
+            td(String.valueOf(info.getNumContainers())).
             td().br().$title(String.valueOf(usedMemory))._().
               _(StringUtils.byteDesc(usedMemory * BYTES_IN_MB))._().
             td().br().$title(String.valueOf(usedMemory))._().

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java Tue Dec 13 23:05:56 2011
@@ -23,6 +23,7 @@ import static org.apache.hadoop.yarn.uti
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 
 /**
@@ -41,6 +42,9 @@ public class RMWebApp extends WebApp {
 
   @Override
   public void setup() {
+    bind(JAXBContextResolver.class);
+    bind(RMWebServices.class);
+    bind(GenericExceptionHandler.class);
     if (rm != null) {
       bind(ResourceManager.class).toInstance(rm);
       bind(RMContext.class).toInstance(rm.getRMContext());

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,333 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentMap;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeState;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodesInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerTypeInfo;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+import org.apache.hadoop.yarn.webapp.NotFoundException;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+/**
+ * JAX-RS root resource exposing ResourceManager cluster state as a REST
+ * API under /ws/v1/cluster: general cluster info, metrics, scheduler
+ * details, nodes and applications.  Every endpoint can produce either
+ * JSON or XML, selected by the request's Accept header.
+ */
+@Singleton
+@Path("/ws/v1/cluster")
+public class RMWebServices {
+  private static final Log LOG = LogFactory.getLog(RMWebServices.class);
+  private final ResourceManager rm;
+  private static RecordFactory recordFactory = RecordFactoryProvider
+      .getRecordFactory(null);
+  private final ApplicationACLsManager aclsManager;
+
+  @Inject
+  public RMWebServices(final ResourceManager rm,
+      final ApplicationACLsManager aclsManager) {
+    this.rm = rm;
+    this.aclsManager = aclsManager;
+  }
+
+  /**
+   * Parses a numeric query parameter, turning a malformed value into an
+   * HTTP 400 instead of letting NumberFormatException surface as a 500.
+   *
+   * @param name parameter name, used in the error message
+   * @param value the raw query-string value; must be non-null
+   * @return the parsed long
+   * @throws BadRequestException if value is not a valid long
+   */
+  private static long parseLongParam(String name, String value) {
+    try {
+      return Long.parseLong(value);
+    } catch (NumberFormatException e) {
+      throw new BadRequestException("Invalid number for " + name + ": "
+          + value);
+    }
+  }
+
+  /**
+   * Checks whether the remote caller may view the given application.
+   * Requests without an authenticated remote user are always allowed.
+   */
+  protected Boolean hasAccess(RMApp app, HttpServletRequest hsr) {
+    // Check for the authorization.
+    String remoteUser = hsr.getRemoteUser();
+    UserGroupInformation callerUGI = null;
+    if (remoteUser != null) {
+      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+    }
+    if (callerUGI != null
+        && !this.aclsManager.checkAccess(callerUGI,
+            ApplicationAccessType.VIEW_APP, app.getUser(),
+            app.getApplicationId())) {
+      return false;
+    }
+    return true;
+  }
+
+  /** Default endpoint: same payload as /info. */
+  @GET
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ClusterInfo get() {
+    return getClusterInfo();
+  }
+
+  /** General cluster information. */
+  @GET
+  @Path("/info")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ClusterInfo getClusterInfo() {
+    return new ClusterInfo(this.rm);
+  }
+
+  /** Aggregate cluster metrics. */
+  @GET
+  @Path("/metrics")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ClusterMetricsInfo getClusterMetricsInfo() {
+    return new ClusterMetricsInfo(this.rm, this.rm.getRMContext());
+  }
+
+  /**
+   * Information about the configured scheduler.  Only the capacity and
+   * fifo schedulers are understood; any other scheduler yields a 404.
+   */
+  @GET
+  @Path("/scheduler")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public SchedulerTypeInfo getSchedulerInfo() {
+    ResourceScheduler rs = rm.getResourceScheduler();
+    SchedulerInfo sinfo;
+    if (rs instanceof CapacityScheduler) {
+      CapacityScheduler cs = (CapacityScheduler) rs;
+      CSQueue root = cs.getRootQueue();
+      sinfo = new CapacitySchedulerInfo(root);
+    } else if (rs instanceof FifoScheduler) {
+      sinfo = new FifoSchedulerInfo(this.rm);
+    } else {
+      throw new NotFoundException("Unknown scheduler configured");
+    }
+    return new SchedulerTypeInfo(sinfo);
+  }
+
+  /**
+   * All known nodes, optionally filtered by node state and/or health.
+   *
+   * @param filterState if set, only nodes in this RMNodeState are kept;
+   *          an unknown state name is a client error (400)
+   * @param healthState if set, must be "true" or "false" and filters
+   *          nodes on their current health report
+   */
+  @GET
+  @Path("/nodes")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public NodesInfo getNodes(@QueryParam("state") String filterState,
+      @QueryParam("healthy") String healthState) {
+    ResourceScheduler sched = this.rm.getResourceScheduler();
+    if (sched == null) {
+      throw new NotFoundException("Null ResourceScheduler instance");
+    }
+
+    // validate the filters once, up front, rather than per node; a bogus
+    // state string becomes a 400 instead of a 500 from the
+    // IllegalArgumentException that valueOf would otherwise throw
+    if (filterState != null) {
+      try {
+        RMNodeState.valueOf(filterState);
+      } catch (IllegalArgumentException e) {
+        throw new BadRequestException("Invalid node state: " + filterState);
+      }
+    }
+    if (healthState != null && !healthState.isEmpty()) {
+      LOG.info("health state is : " + healthState);
+      if (!healthState.equalsIgnoreCase("true")
+          && !healthState.equalsIgnoreCase("false")) {
+        String msg = "Error: You must specify either true or false to query on health";
+        throw new BadRequestException(msg);
+      }
+    }
+
+    NodesInfo allNodes = new NodesInfo();
+    for (RMNode ni : this.rm.getRMContext().getRMNodes().values()) {
+      NodeInfo nodeInfo = new NodeInfo(ni, sched);
+      if (filterState != null
+          && !nodeInfo.getState().equalsIgnoreCase(filterState)) {
+        continue;
+      }
+      if (healthState != null && !healthState.isEmpty()
+          && nodeInfo.isHealthy() != Boolean.parseBoolean(healthState)) {
+        continue;
+      }
+      allNodes.add(nodeInfo);
+    }
+    return allNodes;
+  }
+
+  /** A single node looked up by its node id ("host:port"). */
+  @GET
+  @Path("/nodes/{nodeId}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public NodeInfo getNode(@PathParam("nodeId") String nodeId) {
+    if (nodeId == null || nodeId.isEmpty()) {
+      throw new NotFoundException("nodeId, " + nodeId + ", is empty or null");
+    }
+    ResourceScheduler sched = this.rm.getResourceScheduler();
+    if (sched == null) {
+      throw new NotFoundException("Null ResourceScheduler instance");
+    }
+    NodeId nid = ConverterUtils.toNodeId(nodeId);
+    RMNode ni = this.rm.getRMContext().getRMNodes().get(nid);
+    if (ni == null) {
+      throw new NotFoundException("nodeId, " + nodeId + ", is not found");
+    }
+    return new NodeInfo(ni, sched);
+  }
+
+  /**
+   * Applications known to the RM, filtered by the given query parameters.
+   * All numeric parameters must parse as longs; malformed values,
+   * unknown state names and inverted time ranges are client errors (400).
+   */
+  @GET
+  @Path("/apps")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppsInfo getApps(@Context HttpServletRequest hsr,
+      @QueryParam("state") String stateQuery,
+      @QueryParam("user") String userQuery,
+      @QueryParam("queue") String queueQuery,
+      @QueryParam("limit") String count,
+      @QueryParam("startedTimeBegin") String startedBegin,
+      @QueryParam("startedTimeEnd") String startedEnd,
+      @QueryParam("finishedTimeBegin") String finishBegin,
+      @QueryParam("finishedTimeEnd") String finishEnd) {
+    long num = 0;
+    boolean checkCount = false;
+    boolean checkStart = false;
+    boolean checkEnd = false;
+    long countNum = 0;
+
+    // defaults cover the whole time line when begin/end are not specified
+    long sBegin = 0;
+    long sEnd = Long.MAX_VALUE;
+    long fBegin = 0;
+    long fEnd = Long.MAX_VALUE;
+
+    if (count != null && !count.isEmpty()) {
+      checkCount = true;
+      countNum = parseLongParam("limit", count);
+      if (countNum <= 0) {
+        throw new BadRequestException("limit value must be greater than 0");
+      }
+    }
+
+    if (startedBegin != null && !startedBegin.isEmpty()) {
+      checkStart = true;
+      sBegin = parseLongParam("startedTimeBegin", startedBegin);
+      if (sBegin < 0) {
+        throw new BadRequestException("startedTimeBegin must be greater than 0");
+      }
+    }
+    if (startedEnd != null && !startedEnd.isEmpty()) {
+      checkStart = true;
+      sEnd = parseLongParam("startedTimeEnd", startedEnd);
+      if (sEnd < 0) {
+        throw new BadRequestException("startedTimeEnd must be greater than 0");
+      }
+    }
+    if (sBegin > sEnd) {
+      throw new BadRequestException(
+          "startedTimeEnd must be greater than startTimeBegin");
+    }
+
+    if (finishBegin != null && !finishBegin.isEmpty()) {
+      checkEnd = true;
+      fBegin = parseLongParam("finishedTimeBegin", finishBegin);
+      if (fBegin < 0) {
+        throw new BadRequestException("finishTimeBegin must be greater than 0");
+      }
+    }
+    if (finishEnd != null && !finishEnd.isEmpty()) {
+      checkEnd = true;
+      fEnd = parseLongParam("finishedTimeEnd", finishEnd);
+      if (fEnd < 0) {
+        throw new BadRequestException("finishTimeEnd must be greater than 0");
+      }
+    }
+    if (fBegin > fEnd) {
+      throw new BadRequestException(
+          "finishTimeEnd must be greater than finishTimeBegin");
+    }
+
+    // validate the state and queue filters once, before the scan; an
+    // unknown state used to surface as an IllegalArgumentException (500)
+    if (stateQuery != null && !stateQuery.isEmpty()) {
+      try {
+        RMAppState.valueOf(stateQuery);
+      } catch (IllegalArgumentException e) {
+        throw new BadRequestException("Invalid application state: "
+            + stateQuery);
+      }
+    }
+    if (queueQuery != null && !queueQuery.isEmpty()) {
+      ResourceScheduler rs = rm.getResourceScheduler();
+      if (rs instanceof CapacityScheduler) {
+        CapacityScheduler cs = (CapacityScheduler) rs;
+        // validate queue exists
+        try {
+          cs.getQueueInfo(queueQuery, false, false);
+        } catch (IOException e) {
+          throw new BadRequestException(e.getMessage());
+        }
+      }
+    }
+
+    final ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext()
+        .getRMApps();
+    AppsInfo allApps = new AppsInfo();
+    for (RMApp rmapp : apps.values()) {
+      if (checkCount && num == countNum) {
+        break;
+      }
+      AppInfo app = new AppInfo(rmapp, hasAccess(rmapp, hsr));
+
+      if (stateQuery != null && !stateQuery.isEmpty()
+          && !app.getState().equalsIgnoreCase(stateQuery)) {
+        continue;
+      }
+      if (userQuery != null && !userQuery.isEmpty()
+          && !app.getUser().equals(userQuery)) {
+        continue;
+      }
+      if (queueQuery != null && !queueQuery.isEmpty()
+          && !app.getQueue().equals(queueQuery)) {
+        continue;
+      }
+      if (checkStart
+          && (app.getStartTime() < sBegin || app.getStartTime() > sEnd)) {
+        continue;
+      }
+      if (checkEnd
+          && (app.getFinishTime() < fBegin || app.getFinishTime() > fEnd)) {
+        continue;
+      }
+
+      allApps.add(app);
+      num++;
+    }
+    return allApps;
+  }
+
+  /** A single application looked up by its application id. */
+  @GET
+  @Path("/apps/{appid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppInfo getApp(@Context HttpServletRequest hsr,
+      @PathParam("appid") String appId) {
+    if (appId == null || appId.isEmpty()) {
+      throw new NotFoundException("appId, " + appId + ", is empty or null");
+    }
+    ApplicationId id = ConverterUtils.toApplicationId(recordFactory, appId);
+    if (id == null) {
+      throw new NotFoundException("appId is null");
+    }
+    RMApp app = rm.getRMContext().getRMApps().get(id);
+    if (app == null) {
+      throw new NotFoundException("app with id: " + appId + " not found");
+    }
+    return new AppInfo(app, hasAccess(app, hsr));
+  }
+
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java Tue Dec 13 23:05:56 2011
@@ -26,17 +26,16 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
-import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.util.Apps;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.Controller;
 import org.apache.hadoop.yarn.webapp.ResponseInfo;
@@ -73,13 +72,14 @@ public class RmController extends Contro
     }
     ApplicationId appID = Apps.toAppID(aid);
     RMContext context = getInstance(RMContext.class);
-    RMApp app = context.getRMApps().get(appID);
-    if (app == null) {
+    RMApp rmApp = context.getRMApps().get(appID);
+    if (rmApp == null) {
       // TODO: handle redirect to jobhistory server
       setStatus(HttpServletResponse.SC_NOT_FOUND);
       setTitle("Application not found: "+ aid);
       return;
     }
+    AppInfo app = new AppInfo(rmApp, true);
 
     // Check for the authorization.
     String remoteUser = request().getRemoteUser();
@@ -98,32 +98,22 @@ public class RmController extends Contro
     }
 
     setTitle(join("Application ", aid));
-    String trackingUrl = app.getTrackingUrl();
-    boolean trackingUrlIsNotReady = trackingUrl == null
-        || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
-    String ui = trackingUrlIsNotReady ? "UNASSIGNED" :
-        (app.getFinishTime() == 0 ? "ApplicationMaster" : "History");
 
     ResponseInfo info = info("Application Overview").
       _("User:", app.getUser()).
       _("Name:", app.getName()).
-      _("State:", app.getState().toString()).
-      _("FinalStatus:", app.getFinalApplicationStatus().toString()).
+      _("State:", app.getState()).
+      _("FinalStatus:", app.getFinalStatus()).
       _("Started:", Times.format(app.getStartTime())).
       _("Elapsed:", StringUtils.formatTime(
         Times.elapsed(app.getStartTime(), app.getFinishTime()))).
-      _("Tracking URL:", trackingUrlIsNotReady ?
-        "#" : join("http://", trackingUrl), ui).
-      _("Diagnostics:", app.getDiagnostics());
-    Container masterContainer = app.getCurrentAppAttempt()
-        .getMasterContainer();
-    if (masterContainer != null) {
-      String url = join("http://", masterContainer.getNodeHttpAddress(),
-          "/node", "/containerlogs/",
-          ConverterUtils.toString(masterContainer.getId()));
-      info._("AM container logs:", url, url);
+      _("Tracking URL:", !app.isTrackingUrlReady() ?
+        "#" : app.getTrackingUrlPretty(), app.getTrackingUI()).
+      _("Diagnostics:", app.getNote());
+    if (app.amContainerLogsExist()) {
+      info._("AM container logs:", app.getAMContainerLogs(), app.getAMContainerLogs());
     } else {
-      info._("AM container logs:", "AM not yet registered with RM");
+      info._("AM container logs:", "");
     }
     render(AppPage.class);
   }

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
+
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.util.Times;
+
+/**
+ * JAXB/JSON view of one application, built from an RMApp snapshot.
+ * Fields guarded by application ACLs (start/finish times, AM container
+ * info) are only populated when the caller has view access.
+ */
+@XmlRootElement(name = "app")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AppInfo {
+
+  // helper state for the web UI only; excluded from serialization
+  @XmlTransient
+  protected String appIdNum;
+  @XmlTransient
+  protected boolean trackingUrlIsNotReady;
+  @XmlTransient
+  protected String trackingUrlPretty;
+  @XmlTransient
+  protected boolean amContainerLogsExist = false;
+  @XmlTransient
+  protected ApplicationId applicationId;
+
+  // these are ok for any user to see
+  protected String id;
+  protected String user;
+  protected String name;
+  protected String queue;
+  protected RMAppState state;
+  protected FinalApplicationStatus finalStatus;
+  protected float progress;
+  protected String trackingUI;
+  protected String trackingUrl;
+  protected String diagnostics;
+  protected long clusterId;
+
+  // these are only allowed if acls allow
+  protected long startedTime;
+  protected long finishedTime;
+  protected long elapsedTime;
+  protected String amContainerLogs;
+  protected String amHostHttpAddress;
+
+  public AppInfo() {
+  } // JAXB needs this
+
+  // the host parameter is currently unused; kept for caller compatibility
+  public AppInfo(RMApp app, Boolean hasAccess, String host) {
+    this(app, hasAccess);
+  }
+
+  public AppInfo(RMApp app, Boolean hasAccess) {
+
+    if (app != null) {
+      String trackingUrl = app.getTrackingUrl();
+      this.trackingUrlIsNotReady = trackingUrl == null || trackingUrl.isEmpty()
+          || "N/A".equalsIgnoreCase(trackingUrl);
+      this.trackingUI = this.trackingUrlIsNotReady ? "UNASSIGNED" : (app
+          .getFinishTime() == 0 ? "ApplicationMaster" : "History");
+      if (!trackingUrlIsNotReady) {
+        this.trackingUrl = join("http://", trackingUrl);
+      }
+      this.trackingUrlPretty = trackingUrlIsNotReady ? "UNASSIGNED" : join(
+          "http://", trackingUrl);
+      this.applicationId = app.getApplicationId();
+      this.appIdNum = String.valueOf(app.getApplicationId().getId());
+      this.id = app.getApplicationId().toString();
+      this.user = app.getUser();
+      this.name = app.getName();
+      this.queue = app.getQueue();
+      this.state = app.getState();
+      this.progress = app.getProgress() * 100;
+      // check for null BEFORE stringifying: the previous order called
+      // toString() first, which would NPE on a null diagnostics value and
+      // made the null branch of the check unreachable
+      this.diagnostics = "";
+      if (app.getDiagnostics() != null) {
+        this.diagnostics = app.getDiagnostics().toString();
+      }
+      this.finalStatus = app.getFinalApplicationStatus();
+      this.clusterId = ResourceManager.clusterTimeStamp;
+
+      if (hasAccess) {
+        this.startedTime = app.getStartTime();
+        this.finishedTime = app.getFinishTime();
+        this.elapsedTime = Times.elapsed(app.getStartTime(),
+            app.getFinishTime());
+
+        RMAppAttempt attempt = app.getCurrentAppAttempt();
+        if (attempt != null) {
+          Container masterContainer = attempt.getMasterContainer();
+          if (masterContainer != null) {
+            this.amContainerLogsExist = true;
+            String url = join("http://", masterContainer.getNodeHttpAddress(),
+                "/node", "/containerlogs/",
+                ConverterUtils.toString(masterContainer.getId()));
+            this.amContainerLogs = url;
+            this.amHostHttpAddress = masterContainer.getNodeHttpAddress();
+          }
+        }
+      }
+    }
+  }
+
+  /** True once the app has published a usable tracking URL. */
+  public boolean isTrackingUrlReady() {
+    return !this.trackingUrlIsNotReady;
+  }
+
+  public ApplicationId getApplicationId() {
+    return this.applicationId;
+  }
+
+  public String getAppId() {
+    return this.id;
+  }
+
+  public String getAppIdNum() {
+    return this.appIdNum;
+  }
+
+  public String getUser() {
+    return this.user;
+  }
+
+  public String getQueue() {
+    return this.queue;
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public String getState() {
+    return this.state.toString();
+  }
+
+  public float getProgress() {
+    return this.progress;
+  }
+
+  /** Label shown for the tracking link: UNASSIGNED, ApplicationMaster or History. */
+  public String getTrackingUI() {
+    return this.trackingUI;
+  }
+
+  /** Diagnostics text; never null (empty string when there is none). */
+  public String getNote() {
+    return this.diagnostics;
+  }
+
+  public String getFinalStatus() {
+    return this.finalStatus.toString();
+  }
+
+  public String getTrackingUrl() {
+    return this.trackingUrl;
+  }
+
+  public String getTrackingUrlPretty() {
+    return this.trackingUrlPretty;
+  }
+
+  public long getStartTime() {
+    return this.startedTime;
+  }
+
+  public long getFinishTime() {
+    return this.finishedTime;
+  }
+
+  public long getElapsedTime() {
+    return this.elapsedTime;
+  }
+
+  public String getAMContainerLogs() {
+    return this.amContainerLogs;
+  }
+
+  public String getAMHostHttpAddress() {
+    return this.amHostHttpAddress;
+  }
+
+  /** True when hasAccess was granted and an AM container was registered. */
+  public boolean amContainerLogsExist() {
+    return this.amContainerLogsExist;
+  }
+
+  public long getClusterId() {
+    return this.clusterId;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * JAXB wrapper around a collection of AppInfo records; serializes as an
+ * &lt;apps&gt; element containing one &lt;app&gt; child per application.
+ */
+@XmlRootElement(name = "apps")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AppsInfo {
+
+  // field named "app" so each entry serializes as an <app> child element
+  protected ArrayList<AppInfo> app = new ArrayList<AppInfo>();
+
+  // no-arg constructor required by JAXB
+  public AppsInfo() {
+  }
+
+  /** Appends one application record to the collection. */
+  public void add(AppInfo appinfo) {
+    this.app.add(appinfo);
+  }
+
+  /** The backing list of application records. */
+  public ArrayList<AppInfo> getApps() {
+    return this.app;
+  }
+
+}



Mime
View raw message