chukwa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ey...@apache.org
Subject svn commit: r783442 [2/5] - in /hadoop/chukwa/trunk: ./ contrib/chukwa-pig/ ivy/ lib/ src/java/org/apache/hadoop/chukwa/database/ src/java/org/apache/hadoop/chukwa/datacollection/adaptor/ src/java/org/apache/hadoop/chukwa/rest/ src/java/org/apache/hado...
Date Wed, 10 Jun 2009 18:31:26 GMT
Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DfsThroughputResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DfsThroughputResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DfsThroughputResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DfsThroughputResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.DfsThroughput;
+import org.apache.hadoop.chukwa.rest.services.DfsThroughputHome;
+
+@Path ("/dfs_throughput")
+@Produces("application/xml")
+public class DfsThroughputResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTimestampXML( @PathParam ("timestamp") String timestamp) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces("application/json")
+    public String getByTimestampJason( @PathParam ("timestamp") String timestamp) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTimestampCsv( @PathParam ("timestamp") String timestamp) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp);
+	return convertToCsv(model);
+    }
+
+    // get one object timestamp + host
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"application/xml","text/xml"})
+	public String getByTimestampHostXML( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp, host);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces("application/json")
+	public String getByTimestampHostJason( @PathParam ("timestamp") String timestamp,
+					   @PathParam ("host") String host ) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp, host);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"text/plain","text/csv"})
+	public String getByTimestampHostCsv( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	DfsThroughput model = DfsThroughputHome.find(timestamp, host);
+	return convertToCsv(model);
+    }
+
+    // search range 
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"application/xml", "text/xml"})
+    public String getByKeysXml(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<DfsThroughput> list = DfsThroughputHome.findBetween(starttime,endtime);
+	return convertToXml(list);
+    }
+
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces("application/json")
+    public String getByKeysJson(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<DfsThroughput> list = DfsThroughputHome.findBetween(starttime,endtime);
+	return convertToJson(list);
+    }
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"text/plain", "text/csv"})
+    public String getByKeysCsv(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<DfsThroughput> list = DfsThroughputHome.findBetween(starttime,endtime);
+	return convertToCsv(list);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DiskResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DiskResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DiskResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/DiskResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.Disk;
+import org.apache.hadoop.chukwa.rest.services.DiskHome;
+
+@Path ("/disk")
+@Produces("application/xml")
+public class DiskResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTimestampXML( @PathParam ("timestamp") String timestamp) {
+	Disk model = DiskHome.find(timestamp);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces("application/json")
+    public String getByTimestampJason( @PathParam ("timestamp") String timestamp) {
+	Disk model = DiskHome.find(timestamp);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTimestampCsv( @PathParam ("timestamp") String timestamp) {
+	Disk model = DiskHome.find(timestamp);
+	return convertToCsv(model);
+    }
+
+    // get one object timestamp + host + mount
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/mount/{mount}")
+    @Produces({"application/xml","text/xml"})
+	public String getByTimestampHostXML( @PathParam ("timestamp") String timestamp,
+					     @PathParam ("host") String host,
+					     @PathParam ("mount") String mount ) {
+	Disk model = DiskHome.find(timestamp, host,mount);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/mount/{mount}")
+    @Produces("application/json")
+	public String getByTimestampHostJason( @PathParam ("timestamp") String timestamp,
+					       @PathParam ("host") String host,
+					       @PathParam ("mount") String mount  ) {
+	Disk model = DiskHome.find(timestamp, host, mount);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/mount/{mount}")
+    @Produces({"text/plain","text/csv"})
+	public String getByTimestampHostCsv( @PathParam ("timestamp") String timestamp,
+					     @PathParam ("host") String host,
+					     @PathParam ("mount") String mount  ) {
+	Disk model = DiskHome.find(timestamp, host, mount);
+	return convertToCsv(model);
+    }
+
+    // search range 
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"application/xml", "text/xml"})
+    public String getByKeysXml(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<Disk> list = DiskHome.findBetween(starttime,endtime);
+	return convertToXml(list);
+    }
+
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces("application/json")
+    public String getByKeysJson(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<Disk> list = DiskHome.findBetween(starttime,endtime);
+	return convertToJson(list);
+    }
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"text/plain", "text/csv"})
+    public String getByKeysCsv(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<Disk> list = DiskHome.findBetween(starttime,endtime);
+	return convertToCsv(list);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopJvmResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopJvmResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopJvmResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopJvmResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.HadoopJvm;
+import org.apache.hadoop.chukwa.rest.services.HadoopJvmHome;
+
+@Path ("/hadoop_jvm")
+@Produces("application/xml")
+public class HadoopJvmResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTimestampXML( @PathParam ("timestamp") String timestamp) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces("application/json")
+    public String getByTimestampJason( @PathParam ("timestamp") String timestamp) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTimestampCsv( @PathParam ("timestamp") String timestamp) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp);
+	return convertToCsv(model);
+    }
+
+    // get one object timestamp + host + mount
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/process_name/{process_name}")
+    @Produces({"application/xml","text/xml"})
+	public String getByTimestampHostXML( @PathParam ("timestamp") String timestamp,
+					     @PathParam ("host") String host,
+					     @PathParam ("process_name") String process_name ) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp, host, process_name);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/process_name/{process_name}")
+    @Produces("application/json")
+	public String getByTimestampHostJason( @PathParam ("timestamp") String timestamp,
+					       @PathParam ("host") String host,
+					       @PathParam ("process_name") String process_name  ) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp, host, process_name);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}/process_name/{process_name}")
+    @Produces({"text/plain","text/csv"})
+	public String getByTimestampHostCsv( @PathParam ("timestamp") String timestamp,
+					     @PathParam ("host") String host,
+					     @PathParam ("process_name") String process_name  ) {
+	HadoopJvm model = HadoopJvmHome.find(timestamp, host, process_name);
+	return convertToCsv(model);
+    }
+
+    // search range 
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"application/xml", "text/xml"})
+    public String getByKeysXml(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopJvm> list = HadoopJvmHome.findBetween(starttime,endtime);
+	return convertToXml(list);
+    }
+
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces("application/json")
+    public String getByKeysJson(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopJvm> list = HadoopJvmHome.findBetween(starttime,endtime);
+	return convertToJson(list);
+    }
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"text/plain", "text/csv"})
+    public String getByKeysCsv(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopJvm> list = HadoopJvmHome.findBetween(starttime,endtime);
+	return convertToCsv(list);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopRpcResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopRpcResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopRpcResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/HadoopRpcResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.HadoopRpc;
+import org.apache.hadoop.chukwa.rest.services.HadoopRpcHome;
+
+@Path ("/hadoop_rpc")
+@Produces("application/xml")
+public class HadoopRpcResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTimestampXML( @PathParam ("timestamp") String timestamp) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces("application/json")
+    public String getByTimestampJason( @PathParam ("timestamp") String timestamp) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTimestampCsv( @PathParam ("timestamp") String timestamp) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp);
+	return convertToCsv(model);
+    }
+
+    // get one object timestamp + host
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"application/xml","text/xml"})
+	public String getByTimestampHostXML( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp, host);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces("application/json")
+	public String getByTimestampHostJason( @PathParam ("timestamp") String timestamp,
+					   @PathParam ("host") String host ) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp, host);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"text/plain","text/csv"})
+	public String getByTimestampHostCsv( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	HadoopRpc model = HadoopRpcHome.find(timestamp, host);
+	return convertToCsv(model);
+    }
+
+    // search range 
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"application/xml", "text/xml"})
+    public String getByKeysXml(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopRpc> list = HadoopRpcHome.findBetween(starttime,endtime);
+	return convertToXml(list);
+    }
+
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces("application/json")
+    public String getByKeysJson(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopRpc> list = HadoopRpcHome.findBetween(starttime,endtime);
+	return convertToJson(list);
+    }
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"text/plain", "text/csv"})
+    public String getByKeysCsv(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<HadoopRpc> list = HadoopRpcHome.findBetween(starttime,endtime);
+	return convertToCsv(list);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/JsonDateValueProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/JsonDateValueProcessor.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/JsonDateValueProcessor.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/JsonDateValueProcessor.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.Date;
+import java.text.*;
+import java.sql.Timestamp;
+import net.sf.json.JSONObject;
+import net.sf.json.JsonConfig;
+import net.sf.json.processors.JsonValueProcessor;
+import org.apache.commons.logging.*;
+
+public class JsonDateValueProcessor implements JsonValueProcessor {
+    private static String OUTPUT_FORMAT="yyyy-MM-dd HH:mm:ss";
+    protected static Log log = LogFactory.getLog(JsonDateValueProcessor.class);
+
+    public Object processArrayValue(Object value, JsonConfig jsonConfig) {
+	return process(value);
+    }
+    
+    public Object processObjectValue(String key, Object value,
+				     JsonConfig jsonConfig) {
+	return process(value);
+    }
+    
+    private Object process(Object value) {
+	DateFormat dateFormat = new SimpleDateFormat(OUTPUT_FORMAT);
+	if (value == null) {
+	    return "";
+	}
+	if(value instanceof Timestamp)
+	    return dateFormat.format((Timestamp) value);
+	else 
+	    return dateFormat.format((Date) value);
+    }
+}
+
+

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobConfResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobConfResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobConfResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobConfResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.MrJobConf;
+import org.apache.hadoop.chukwa.rest.services.MrJobConfHome;
+
+@Path ("/mr_job_conf")
+@Produces("application/xml")
+public class MrJobConfResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces({"application/xml","text/xml"})
+    public String getByJob_IdXML( @PathParam ("job_id") String job_id) {
+	MrJobConf model = MrJobConfHome.find(job_id);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces("application/json")
+    public String getByJob_IdJason( @PathParam ("job_id") String job_id) {
+	MrJobConf model = MrJobConfHome.find(job_id);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces({"text/plain","text/csv"})
+    public String getByJob_IdCsv( @PathParam ("job_id") String job_id) {
+	MrJobConf model = MrJobConfHome.find(job_id);
+	return convertToCsv(model);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrJobResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.MrJob;
+import org.apache.hadoop.chukwa.rest.services.MrJobHome;
+
+@Path ("/mr_job")
+@Produces("application/xml")
+public class MrJobResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces({"application/xml","text/xml"})
+    public String getByJob_IdXML( @PathParam ("job_id") String job_id) {
+	MrJob model = MrJobHome.find(job_id);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces("application/json")
+    public String getByJob_IdJason( @PathParam ("job_id") String job_id) {
+	MrJob model = MrJobHome.find(job_id);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("job_id/{job_id}")
+    @Produces({"text/plain","text/csv"})
+    public String getByJob_IdCsv( @PathParam ("job_id") String job_id) {
+	MrJob model = MrJobHome.find(job_id);
+	return convertToCsv(model);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrTaskResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrTaskResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrTaskResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/MrTaskResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.MrTask;
+import org.apache.hadoop.chukwa.rest.services.MrTaskHome;
+
+@Path ("/mr_task")
+@Produces("application/xml")
+public class MrTaskResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("task_id/{task_id}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTask_IdXML( @PathParam ("task_id") String task_id) {
+	MrTask model = MrTaskHome.find(task_id);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("task_id/{task_id}")
+    @Produces("application/json")
+    public String getByTask_IdJason( @PathParam ("task_id") String task_id) {
+	MrTask model = MrTaskHome.find(task_id);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("task_id/{task_id}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTask_IdCsv( @PathParam ("task_id") String task_id) {
+	MrTask model = MrTaskHome.find(task_id);
+	return convertToCsv(model);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/RestController.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/RestController.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/RestController.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/RestController.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import org.apache.commons.logging.*;
+import org.apache.commons.lang.time.*;
+import java.lang.reflect.*;
+import net.sf.json.*;
+import net.sf.json.processors.JsonValueProcessor;
+
+public class RestController  {
+    protected static Log log = LogFactory.getLog(RestController.class);
+
+    private static String convertObjectToXml(Object obj) {
+	StringBuilder s=new StringBuilder();
+	s.append("<item>");
+	try {
+            Class cls = obj.getClass();
+        
+            Field fieldlist[] 
+		= cls.getDeclaredFields();
+            for (int i = 0; i < fieldlist.length; i++) {
+		Field fld = fieldlist[i];
+		String fldName = fld.getName();
+		String functionName = "get"+ fldName.substring(0,1).toUpperCase() + fldName.substring(1);
+		String value = "";
+                @SuppressWarnings("unchecked")
+		Method meth=cls.getMethod(functionName);
+		Object oret = meth.invoke(obj);
+		if (oret == null) {
+		    value="";
+		} else if ((oret instanceof Date) || (oret instanceof java.sql.Timestamp)) {
+		    
+		    java.sql.Timestamp d = (java.sql.Timestamp) oret;
+		    
+		    long time = d.getTime();
+		    String date = DateFormatUtils.format(time, "yyyy-MM-dd HH:mm:ss");
+		    value = date;
+		} else {
+		    value = oret.toString();
+		}
+
+		s.append("<"+fldName+">"+value+"</"+fldName+">");
+            }
+	    s.append("\n");
+	}
+	catch (Throwable e) {
+	    System.err.println(e);
+	}
+	s.append("</item>");
+	return s.toString();
+    }
+
+    private static String getObjectFields(Object obj) {
+	StringBuilder s=new StringBuilder();
+	try {
+            Class cls = obj.getClass();
+
+            Method methlist[] 
+		= cls.getDeclaredMethods();
+	    int count=0;
+            for (int i = 0; i < methlist.length;
+		 i++) {  
+		Method m = methlist[i];
+		if (m.getName().startsWith("get")) {
+		    String name=m.getName().substring(3);
+		    if (count!=0) {
+			s.append(",");
+		    }	
+		    count+=1;
+		    s.append("\""+name+"\"");
+		}
+            }
+	    s.append("\n");
+	}
+	catch (Throwable e) {
+	    System.err.println(e);
+	}
+	return s.toString();
+
+    }
+
+    private static String getObjectValues(Object obj) {
+	StringBuilder s=new StringBuilder();
+        try {
+	    Class cls = obj.getClass();
+        
+            Method methlist[] 
+		= cls.getDeclaredMethods();
+	    int count=0;
+            for (int i = 0; i < methlist.length;
+		 i++) {  
+		Method m = methlist[i];
+		if (m.getName().startsWith("get")) {
+		    String name=m.getName();
+                    @SuppressWarnings("unchecked")
+		    Method meth=cls.getMethod(name);
+		    Object oret = meth.invoke(obj);
+		    if (count!=0) {
+			s.append(",");
+		    }
+		    count+=1;
+		    if (oret == null) {
+			s.append("\"\"");
+		    } else if ((oret instanceof Date) || (oret instanceof java.sql.Timestamp)) {
+			long time=0;
+			if (oret instanceof Date) {
+			    Date d = (Date) oret;
+			    time = d.getTime();
+			} else if (oret instanceof java.sql.Timestamp) {
+			    java.sql.Timestamp d = (java.sql.Timestamp) oret;
+			    time = d.getTime();
+			}
+
+                        String date = DateFormatUtils.format(time, "yyyy-MM-dd HH:mm:ss");
+			s.append("\""+date+"\"");
+		    } else {
+			s.append("\""+oret.toString()+"\"");
+		    }
+		}
+	    }
+	    s.append("\n");
+	}
+	catch (Throwable e) {
+            System.err.println(e);
+	}
+	return s.toString();
+    }
+
+    
+    protected static String convertToJson(Object obj) {
+	String str="";
+
+        JsonConfig config = new JsonConfig();
+	config.registerJsonValueProcessor(Date.class,new JsonDateValueProcessor());
+	config.registerJsonValueProcessor(java.sql.Timestamp.class,new JsonDateValueProcessor()); 
+
+        if (obj != null) {
+            if (isArray(obj)) {
+
+                JSONArray jsonArray = JSONArray.fromObject(obj, config);
+                str=jsonArray.toString();
+            } else {
+
+                JSONObject jsonObject = JSONObject.fromObject(obj, config);
+                str=jsonObject.toString();
+            }
+        }
+
+	return str;
+    }
+
+    protected static String convertToXml(Object obj) {
+	StringBuilder s=new StringBuilder();
+	s.append("<items>");
+	if ( obj != null) {
+	    if (isArray(obj)) {
+		Iterator iterator = ((Collection)obj).iterator();
+		while (iterator.hasNext()) {
+		    Object element = iterator.next();
+		    s.append(convertObjectToXml(element));
+		}
+	    } else {
+		s.append(convertObjectToXml(obj));
+	    }
+	}
+	s.append("</items>");
+	return s.toString();
+    }
+
+    protected static String convertToCsv(Object obj) {
+	StringBuilder str=new StringBuilder();
+	if ( obj != null) {
+	    if (isArray(obj)) {
+		boolean first=true;
+		Iterator iterator = ((Collection)obj).iterator();
+		while (iterator.hasNext()) {
+		    Object element = iterator.next();
+		    if (first) {
+			first=false;
+			str.append(getObjectFields(element));
+		    }
+		    str.append(getObjectValues(element));
+		}
+	    } else {
+		str.append(getObjectFields(obj));
+		str.append(getObjectValues(obj));
+	    }
+	}
+	return str.toString();
+    }
+
+    private static boolean isArray(Object obj) {
+        return obj instanceof Collection || obj.getClass().isArray();
+    }
+
+
+    
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/SystemMetricsResource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/SystemMetricsResource.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/SystemMetricsResource.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/actions/SystemMetricsResource.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.actions;
+
+import java.util.*;
+import javax.ws.rs.*;
+
+import org.apache.hadoop.chukwa.rest.objects.SystemMetrics;
+import org.apache.hadoop.chukwa.rest.services.SystemMetricsHome;
+
+@Path ("/system_metrics")
+@Produces("application/xml")
+public class SystemMetricsResource extends RestController {
+
+    // get one object
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"application/xml","text/xml"})
+    public String getByTimestampXML( @PathParam ("timestamp") String timestamp) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces("application/json")
+    public String getByTimestampJason( @PathParam ("timestamp") String timestamp) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}")
+    @Produces({"text/plain","text/csv"})
+    public String getByTimestampCsv( @PathParam ("timestamp") String timestamp) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp);
+	return convertToCsv(model);
+    }
+
+    // get one object timestamp + host
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"application/xml","text/xml"})
+	public String getByTimestampHostXML( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp, host);
+	return convertToXml(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces("application/json")
+	public String getByTimestampHostJason( @PathParam ("timestamp") String timestamp,
+					   @PathParam ("host") String host ) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp, host);
+	return convertToJson(model);
+    }
+    
+    @GET
+    @Path("timestamp/{timestamp}/host/{host}")
+    @Produces({"text/plain","text/csv"})
+	public String getByTimestampHostCsv( @PathParam ("timestamp") String timestamp,
+					 @PathParam ("host") String host ) {
+	SystemMetrics model = SystemMetricsHome.find(timestamp, host);
+	return convertToCsv(model);
+    }
+
+    // search range 
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"application/xml", "text/xml"})
+    public String getByKeysXml(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<SystemMetrics> list = SystemMetricsHome.findBetween(starttime,endtime);
+	return convertToXml(list);
+    }
+
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces("application/json")
+    public String getByKeysJson(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<SystemMetrics> list = SystemMetricsHome.findBetween(starttime,endtime);
+	return convertToJson(list);
+    }
+    @GET
+    @Path("starttime/{starttime}/endtime/{endtime}")
+    @Produces({"text/plain", "text/csv"})
+    public String getByKeysCsv(@PathParam("starttime") String starttime,
+			    @PathParam("endtime") String endtime) {
+	Collection<SystemMetrics> list = SystemMetricsHome.findBetween(starttime,endtime);
+	return convertToCsv(list);
+    }
+}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClientTrace.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClientTrace.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClientTrace.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClientTrace.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.objects;
+
+// Generated May 28, 2009 3:39:53 PM by Hibernate Tools 3.2.4.GA
+import java.sql.Timestamp;
+
+/**
+ * ClientTrace generated by hbm2java
+ */
/**
 * Model bean for one row of the client_trace table, generated by
 * hbm2java.  Holds per-sample HDFS read/write and MapReduce shuffle
 * counters broken down by network locality (local node, intra-rack,
 * inter-rack).
 */
public class ClientTrace implements java.io.Serializable {

    static final long serialVersionUID = 4644588139290113067L;

    // Sample time plus one counter per (operation, locality) pair.
    private Timestamp timestamp;
    private Double localHdfsRead;
    private Double intraRackHdfsRead;
    private Double interRackHdfsRead;
    private Double localHdfsWrite;
    private Double intraRackHdfsWrite;
    private Double interRackHdfsWrite;
    private Double localMapredShuffle;
    private Double intraRackMapredShuffle;
    private Double interRackMapredShuffle;

    /** No-arg constructor required for bean instantiation. */
    public ClientTrace() {
    }

    /** Creates a trace row with only the sample time set. */
    public ClientTrace(Timestamp timestamp) {
        this.timestamp = timestamp;
    }

    /** Creates a fully populated trace row. */
    public ClientTrace(Timestamp timestamp, Double localHdfsRead,
                       Double intraRackHdfsRead, Double interRackHdfsRead,
                       Double localHdfsWrite, Double intraRackHdfsWrite,
                       Double interRackHdfsWrite, Double localMapredShuffle,
                       Double intraRackMapredShuffle, Double interRackMapredShuffle) {
        this.timestamp = timestamp;
        this.localHdfsRead = localHdfsRead;
        this.intraRackHdfsRead = intraRackHdfsRead;
        this.interRackHdfsRead = interRackHdfsRead;
        this.localHdfsWrite = localHdfsWrite;
        this.intraRackHdfsWrite = intraRackHdfsWrite;
        this.interRackHdfsWrite = interRackHdfsWrite;
        this.localMapredShuffle = localMapredShuffle;
        this.intraRackMapredShuffle = intraRackMapredShuffle;
        this.interRackMapredShuffle = interRackMapredShuffle;
    }

    public Timestamp getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Timestamp timestamp) {
        this.timestamp = timestamp;
    }

    public Double getLocalHdfsRead() {
        return localHdfsRead;
    }

    public void setLocalHdfsRead(Double localHdfsRead) {
        this.localHdfsRead = localHdfsRead;
    }

    public Double getIntraRackHdfsRead() {
        return intraRackHdfsRead;
    }

    public void setIntraRackHdfsRead(Double intraRackHdfsRead) {
        this.intraRackHdfsRead = intraRackHdfsRead;
    }

    public Double getInterRackHdfsRead() {
        return interRackHdfsRead;
    }

    public void setInterRackHdfsRead(Double interRackHdfsRead) {
        this.interRackHdfsRead = interRackHdfsRead;
    }

    public Double getLocalHdfsWrite() {
        return localHdfsWrite;
    }

    public void setLocalHdfsWrite(Double localHdfsWrite) {
        this.localHdfsWrite = localHdfsWrite;
    }

    public Double getIntraRackHdfsWrite() {
        return intraRackHdfsWrite;
    }

    public void setIntraRackHdfsWrite(Double intraRackHdfsWrite) {
        this.intraRackHdfsWrite = intraRackHdfsWrite;
    }

    public Double getInterRackHdfsWrite() {
        return interRackHdfsWrite;
    }

    public void setInterRackHdfsWrite(Double interRackHdfsWrite) {
        this.interRackHdfsWrite = interRackHdfsWrite;
    }

    public Double getLocalMapredShuffle() {
        return localMapredShuffle;
    }

    public void setLocalMapredShuffle(Double localMapredShuffle) {
        this.localMapredShuffle = localMapredShuffle;
    }

    public Double getIntraRackMapredShuffle() {
        return intraRackMapredShuffle;
    }

    public void setIntraRackMapredShuffle(Double intraRackMapredShuffle) {
        this.intraRackMapredShuffle = intraRackMapredShuffle;
    }

    public Double getInterRackMapredShuffle() {
        return interRackMapredShuffle;
    }

    public void setInterRackMapredShuffle(Double interRackMapredShuffle) {
        this.interRackMapredShuffle = interRackMapredShuffle;
    }
}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterDisk.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterDisk.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterDisk.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterDisk.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.objects;
+
+// Generated May 28, 2009 3:39:53 PM by Hibernate Tools 3.2.4.GA
+import java.sql.Timestamp;
+
+/**
+ * ClusterDisk generated by hbm2java
+ */
/**
 * Model bean for one row of the cluster_disk table, generated by
 * hbm2java.  Records aggregate disk usage for one mount point at a
 * point in time.  Values are presumably aggregated across the cluster;
 * units are not visible here — TODO confirm against the loader.
 */
public class ClusterDisk implements java.io.Serializable {

    static final long serialVersionUID = -3170394679051640836L;

    private Timestamp timestamp;
    private String mount;
    private Double used;
    private Double available;
    private Double usedPercent;

    /** No-arg constructor required for bean instantiation. */
    public ClusterDisk() {
    }

    /** Creates a row keyed by sample time and mount point. */
    public ClusterDisk(Timestamp timestamp, String mount) {
        this.timestamp = timestamp;
        this.mount = mount;
    }

    /** Creates a fully populated row. */
    public ClusterDisk(Timestamp timestamp, String mount, Double used,
                       Double available, Double usedPercent) {
        this.timestamp = timestamp;
        this.mount = mount;
        this.used = used;
        this.available = available;
        this.usedPercent = usedPercent;
    }

    public Timestamp getTimestamp() {
        return this.timestamp;
    }

    /**
     * Bean-compliant setter for the timestamp property.  Every sibling
     * model bean pairs getTimestamp with setTimestamp; the original
     * setTime name broke that convention (and JavaBean introspection).
     */
    public void setTimestamp(Timestamp timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * @deprecated misnamed legacy setter kept for source compatibility;
     *             use {@link #setTimestamp(Timestamp)} instead.
     */
    @Deprecated
    public void setTime(Timestamp timestamp) {
        setTimestamp(timestamp);
    }

    public String getMount() {
        return this.mount;
    }

    public void setMount(String mount) {
        this.mount = mount;
    }

    public Double getUsed() {
        return this.used;
    }

    public void setUsed(Double used) {
        this.used = used;
    }

    public Double getAvailable() {
        return this.available;
    }

    public void setAvailable(Double available) {
        this.available = available;
    }

    public Double getUsedPercent() {
        return this.usedPercent;
    }

    public void setUsedPercent(Double usedPercent) {
        this.usedPercent = usedPercent;
    }
}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterHadoopRpc.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterHadoopRpc.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterHadoopRpc.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterHadoopRpc.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.objects;
+
+// Generated May 28, 2009 3:39:53 PM by Hibernate Tools 3.2.4.GA
+
+import java.sql.Timestamp;
+
+/**
+ * ClusterHadoopRpc generated by hbm2java
+ */
/**
 * Model bean for one row of the cluster_hadoop_rpc table, generated by
 * hbm2java.  Each row holds the average-time and operation-count metric
 * pair for every JobTracker RPC method, sampled at {@code timestamp}.
 */
public class ClusterHadoopRpc implements java.io.Serializable {

    static final long serialVersionUID = -5101110703210389623L;

    private Timestamp timestamp;
    // NOTE(review): host is an Integer, unlike the String host used by
    // other resources — presumably a numeric host key; confirm against
    // the table schema before relying on it.
    private Integer host;
    // One AvgTime/NumOps pair per RPC method, matching the Hadoop RPC
    // metrics names.
    private Double rpcProcessingTimeAvgTime;
    private Double rpcProcessingTimeNumOps;
    private Double rpcQueueTimeAvgTime;
    private Double rpcQueueTimeNumOps;
    private Double getBuildVersionAvgTime;
    private Double getBuildVersionNumOps;
    private Double getJobCountersAvgTime;
    private Double getJobCountersNumOps;
    private Double getJobProfileAvgTime;
    private Double getJobProfileNumOps;
    private Double getJobStatusAvgTime;
    private Double getJobStatusNumOps;
    private Double getNewJobIdAvgTime;
    private Double getNewJobIdNumOps;
    private Double getProtocolVersionAvgTime;
    private Double getProtocolVersionNumOps;
    private Double getSystemDirAvgTime;
    private Double getSystemDirNumOps;
    private Double getTaskCompletionEventsAvgTime;
    private Double getTaskCompletionEventsNumOps;
    private Double getTaskDiagnosticsAvgTime;
    private Double getTaskDiagnosticsNumOps;
    private Double heartbeatAvgTime;
    private Double heartbeatNumOps;
    private Double killJobAvgTime;
    private Double killJobNumOps;
    private Double submitJobAvgTime;
    private Double submitJobNumOps;

    /** No-arg constructor required for bean instantiation. */
    public ClusterHadoopRpc() {
    }

    /** Creates a row with only the sample time set. */
    public ClusterHadoopRpc(Timestamp timestamp) {
	this.timestamp = timestamp;
    }

    /** Creates a fully populated row. */
    public ClusterHadoopRpc(Timestamp timestamp, Integer host,
				    Double rpcProcessingTimeAvgTime, Double rpcProcessingTimeNumOps,
				    Double rpcQueueTimeAvgTime, Double rpcQueueTimeNumOps,
				    Double getBuildVersionAvgTime, Double getBuildVersionNumOps,
				    Double getJobCountersAvgTime, Double getJobCountersNumOps,
				    Double getJobProfileAvgTime, Double getJobProfileNumOps,
				    Double getJobStatusAvgTime, Double getJobStatusNumOps,
				    Double getNewJobIdAvgTime, Double getNewJobIdNumOps,
				    Double getProtocolVersionAvgTime, Double getProtocolVersionNumOps,
				    Double getSystemDirAvgTime, Double getSystemDirNumOps,
				    Double getTaskCompletionEventsAvgTime,
				    Double getTaskCompletionEventsNumOps,
				    Double getTaskDiagnosticsAvgTime, Double getTaskDiagnosticsNumOps,
				    Double heartbeatAvgTime, Double heartbeatNumOps,
				    Double killJobAvgTime, Double killJobNumOps,
				    Double submitJobAvgTime, Double submitJobNumOps) {
	this.timestamp = timestamp;
	this.host = host;
	this.rpcProcessingTimeAvgTime = rpcProcessingTimeAvgTime;
	this.rpcProcessingTimeNumOps = rpcProcessingTimeNumOps;
	this.rpcQueueTimeAvgTime = rpcQueueTimeAvgTime;
	this.rpcQueueTimeNumOps = rpcQueueTimeNumOps;
	this.getBuildVersionAvgTime = getBuildVersionAvgTime;
	this.getBuildVersionNumOps = getBuildVersionNumOps;
	this.getJobCountersAvgTime = getJobCountersAvgTime;
	this.getJobCountersNumOps = getJobCountersNumOps;
	this.getJobProfileAvgTime = getJobProfileAvgTime;
	this.getJobProfileNumOps = getJobProfileNumOps;
	this.getJobStatusAvgTime = getJobStatusAvgTime;
	this.getJobStatusNumOps = getJobStatusNumOps;
	this.getNewJobIdAvgTime = getNewJobIdAvgTime;
	this.getNewJobIdNumOps = getNewJobIdNumOps;
	this.getProtocolVersionAvgTime = getProtocolVersionAvgTime;
	this.getProtocolVersionNumOps = getProtocolVersionNumOps;
	this.getSystemDirAvgTime = getSystemDirAvgTime;
	this.getSystemDirNumOps = getSystemDirNumOps;
	this.getTaskCompletionEventsAvgTime = getTaskCompletionEventsAvgTime;
	this.getTaskCompletionEventsNumOps = getTaskCompletionEventsNumOps;
	this.getTaskDiagnosticsAvgTime = getTaskDiagnosticsAvgTime;
	this.getTaskDiagnosticsNumOps = getTaskDiagnosticsNumOps;
	this.heartbeatAvgTime = heartbeatAvgTime;
	this.heartbeatNumOps = heartbeatNumOps;
	this.killJobAvgTime = killJobAvgTime;
	this.killJobNumOps = killJobNumOps;
	this.submitJobAvgTime = submitJobAvgTime;
	this.submitJobNumOps = submitJobNumOps;
    }

    // Plain getter/setter pairs; field names double as the "getXxx"
    // metric names reflected over by RestController's converters.

    public Timestamp getTimestamp() {
	return this.timestamp;
    }

    public void setTimestamp(Timestamp timestamp) {
	this.timestamp = timestamp;
    }

    public Integer getHost() {
	return this.host;
    }

    public void setHost(Integer host) {
	this.host = host;
    }

    public Double getRpcProcessingTimeAvgTime() {
	return this.rpcProcessingTimeAvgTime;
    }

    public void setRpcProcessingTimeAvgTime(Double rpcProcessingTimeAvgTime) {
	this.rpcProcessingTimeAvgTime = rpcProcessingTimeAvgTime;
    }

    public Double getRpcProcessingTimeNumOps() {
	return this.rpcProcessingTimeNumOps;
    }

    public void setRpcProcessingTimeNumOps(Double rpcProcessingTimeNumOps) {
	this.rpcProcessingTimeNumOps = rpcProcessingTimeNumOps;
    }

    public Double getRpcQueueTimeAvgTime() {
	return this.rpcQueueTimeAvgTime;
    }

    public void setRpcQueueTimeAvgTime(Double rpcQueueTimeAvgTime) {
	this.rpcQueueTimeAvgTime = rpcQueueTimeAvgTime;
    }

    public Double getRpcQueueTimeNumOps() {
	return this.rpcQueueTimeNumOps;
    }

    public void setRpcQueueTimeNumOps(Double rpcQueueTimeNumOps) {
	this.rpcQueueTimeNumOps = rpcQueueTimeNumOps;
    }

    public Double getGetBuildVersionAvgTime() {
	return this.getBuildVersionAvgTime;
    }

    public void setGetBuildVersionAvgTime(Double getBuildVersionAvgTime) {
	this.getBuildVersionAvgTime = getBuildVersionAvgTime;
    }

    public Double getGetBuildVersionNumOps() {
	return this.getBuildVersionNumOps;
    }

    public void setGetBuildVersionNumOps(Double getBuildVersionNumOps) {
	this.getBuildVersionNumOps = getBuildVersionNumOps;
    }

    public Double getGetJobCountersAvgTime() {
	return this.getJobCountersAvgTime;
    }

    public void setGetJobCountersAvgTime(Double getJobCountersAvgTime) {
	this.getJobCountersAvgTime = getJobCountersAvgTime;
    }

    public Double getGetJobCountersNumOps() {
	return this.getJobCountersNumOps;
    }

    public void setGetJobCountersNumOps(Double getJobCountersNumOps) {
	this.getJobCountersNumOps = getJobCountersNumOps;
    }

    public Double getGetJobProfileAvgTime() {
	return this.getJobProfileAvgTime;
    }

    public void setGetJobProfileAvgTime(Double getJobProfileAvgTime) {
	this.getJobProfileAvgTime = getJobProfileAvgTime;
    }

    public Double getGetJobProfileNumOps() {
	return this.getJobProfileNumOps;
    }

    public void setGetJobProfileNumOps(Double getJobProfileNumOps) {
	this.getJobProfileNumOps = getJobProfileNumOps;
    }

    public Double getGetJobStatusAvgTime() {
	return this.getJobStatusAvgTime;
    }

    public void setGetJobStatusAvgTime(Double getJobStatusAvgTime) {
	this.getJobStatusAvgTime = getJobStatusAvgTime;
    }

    public Double getGetJobStatusNumOps() {
	return this.getJobStatusNumOps;
    }

    public void setGetJobStatusNumOps(Double getJobStatusNumOps) {
	this.getJobStatusNumOps = getJobStatusNumOps;
    }

    public Double getGetNewJobIdAvgTime() {
	return this.getNewJobIdAvgTime;
    }

    public void setGetNewJobIdAvgTime(Double getNewJobIdAvgTime) {
	this.getNewJobIdAvgTime = getNewJobIdAvgTime;
    }

    public Double getGetNewJobIdNumOps() {
	return this.getNewJobIdNumOps;
    }

    public void setGetNewJobIdNumOps(Double getNewJobIdNumOps) {
	this.getNewJobIdNumOps = getNewJobIdNumOps;
    }

    public Double getGetProtocolVersionAvgTime() {
	return this.getProtocolVersionAvgTime;
    }

    public void setGetProtocolVersionAvgTime(Double getProtocolVersionAvgTime) {
	this.getProtocolVersionAvgTime = getProtocolVersionAvgTime;
    }

    public Double getGetProtocolVersionNumOps() {
	return this.getProtocolVersionNumOps;
    }

    public void setGetProtocolVersionNumOps(Double getProtocolVersionNumOps) {
	this.getProtocolVersionNumOps = getProtocolVersionNumOps;
    }

    public Double getGetSystemDirAvgTime() {
	return this.getSystemDirAvgTime;
    }

    public void setGetSystemDirAvgTime(Double getSystemDirAvgTime) {
	this.getSystemDirAvgTime = getSystemDirAvgTime;
    }

    public Double getGetSystemDirNumOps() {
	return this.getSystemDirNumOps;
    }

    public void setGetSystemDirNumOps(Double getSystemDirNumOps) {
	this.getSystemDirNumOps = getSystemDirNumOps;
    }

    public Double getGetTaskCompletionEventsAvgTime() {
	return this.getTaskCompletionEventsAvgTime;
    }

    public void setGetTaskCompletionEventsAvgTime(
						  Double getTaskCompletionEventsAvgTime) {
	this.getTaskCompletionEventsAvgTime = getTaskCompletionEventsAvgTime;
    }

    public Double getGetTaskCompletionEventsNumOps() {
	return this.getTaskCompletionEventsNumOps;
    }

    public void setGetTaskCompletionEventsNumOps(
						 Double getTaskCompletionEventsNumOps) {
	this.getTaskCompletionEventsNumOps = getTaskCompletionEventsNumOps;
    }

    public Double getGetTaskDiagnosticsAvgTime() {
	return this.getTaskDiagnosticsAvgTime;
    }

    public void setGetTaskDiagnosticsAvgTime(Double getTaskDiagnosticsAvgTime) {
	this.getTaskDiagnosticsAvgTime = getTaskDiagnosticsAvgTime;
    }

    public Double getGetTaskDiagnosticsNumOps() {
	return this.getTaskDiagnosticsNumOps;
    }

    public void setGetTaskDiagnosticsNumOps(Double getTaskDiagnosticsNumOps) {
	this.getTaskDiagnosticsNumOps = getTaskDiagnosticsNumOps;
    }

    public Double getHeartbeatAvgTime() {
	return this.heartbeatAvgTime;
    }

    public void setHeartbeatAvgTime(Double heartbeatAvgTime) {
	this.heartbeatAvgTime = heartbeatAvgTime;
    }

    public Double getHeartbeatNumOps() {
	return this.heartbeatNumOps;
    }

    public void setHeartbeatNumOps(Double heartbeatNumOps) {
	this.heartbeatNumOps = heartbeatNumOps;
    }

    public Double getKillJobAvgTime() {
	return this.killJobAvgTime;
    }

    public void setKillJobAvgTime(Double killJobAvgTime) {
	this.killJobAvgTime = killJobAvgTime;
    }

    public Double getKillJobNumOps() {
	return this.killJobNumOps;
    }

    public void setKillJobNumOps(Double killJobNumOps) {
	this.killJobNumOps = killJobNumOps;
    }

    public Double getSubmitJobAvgTime() {
	return this.submitJobAvgTime;
    }

    public void setSubmitJobAvgTime(Double submitJobAvgTime) {
	this.submitJobAvgTime = submitJobAvgTime;
    }

    public Double getSubmitJobNumOps() {
	return this.submitJobNumOps;
    }

    public void setSubmitJobNumOps(Double submitJobNumOps) {
	this.submitJobNumOps = submitJobNumOps;
    }

}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterSystemMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterSystemMetrics.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterSystemMetrics.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/ClusterSystemMetrics.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,610 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.objects;
+
+// Generated May 28, 2009 3:39:53 PM by Hibernate Tools 3.2.4.GA
+
+import java.sql.Timestamp;
+
+/**
+ * ClusterSystemMetrics generated by hbm2java
+ */
/**
 * Bean holding one row of the cluster_system_metrics table: per-sample
 * load averages, task-state counts, memory figures, per-NIC traffic
 * counters, per-disk throughput, and derived percentage gauges, keyed by
 * the sample timestamp. All fields are nullable object wrappers as
 * produced by the Hibernate mapping; unset fields are null.
 */
public class ClusterSystemMetrics implements java.io.Serializable {

    static final long serialVersionUID = -2569521727910619893L;

    // Identity
    private Timestamp timestamp;
    private Integer host;
    // Load averages and task-state counts
    private Double load15;
    private Double load5;
    private Double load1;
    private Double taskTotal;
    private Double taskRunning;
    private Double taskSleep;
    private Double taskStopped;
    private Double taskZombie;
    // Memory (absolute values)
    private Double memTotal;
    private Double memBuffers;
    private Double memCached;
    private Double memUsed;
    private Double memFree;
    // Network interface counters (eth0 / eth1)
    private Double eth0Rxerrs;
    private Double eth0Rxbyts;
    private Double eth0Rxpcks;
    private Double eth0Rxdrops;
    private Double eth0Txerrs;
    private Double eth0Txbyts;
    private Double eth0Txpcks;
    private Double eth0Txdrops;
    private Double eth1Rxerrs;
    private Double eth1Rxbyts;
    private Double eth1Rxpcks;
    private Double eth1Rxdrops;
    private Double eth1Txerrs;
    private Double eth1Txbyts;
    private Double eth1Txpcks;
    private Double eth1Txdrops;
    // Disk throughput in KB/s (sda..sdd) -- presumably read/write kilobytes; verify against collector
    private Double sdaRkbs;
    private Double sdaWkbs;
    private Double sdbRkbs;
    private Double sdbWkbs;
    private Double sdcRkbs;
    private Double sdcWkbs;
    private Double sddRkbs;
    private Double sddWkbs;
    // Derived percentage gauges
    private Float cpuIdlePcnt;
    private Float cpuNicePcnt;
    private Float cpuSystemPcnt;
    private Float cpuUserPcnt;
    private Float cpuHirqPcnt;
    private Float cpuSirqPcnt;
    private Float iowaitPcnt;
    private Float memBuffersPcnt;
    private Float memUsedPcnt;
    private Float eth0BusyPcnt;
    private Float eth1BusyPcnt;
    private Float sdaBusyPcnt;
    private Float sdbBusyPcnt;
    private Float sdcBusyPcnt;
    private Float sddBusyPcnt;
    private Float swapUsedPcnt;

    /** Creates an empty record; populate via setters. */
    public ClusterSystemMetrics() {
    }

    /** Creates a record carrying only its key timestamp. */
    public ClusterSystemMetrics(Timestamp timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * Creates a fully populated record. Parameters follow the column order
     * of the cluster_system_metrics table.
     */
    public ClusterSystemMetrics(Timestamp timestamp, Integer host,
            Double load15, Double load5, Double load1, Double taskTotal,
            Double taskRunning, Double taskSleep, Double taskStopped,
            Double taskZombie, Double memTotal, Double memBuffers,
            Double memCached, Double memUsed, Double memFree,
            Double eth0Rxerrs, Double eth0Rxbyts, Double eth0Rxpcks,
            Double eth0Rxdrops, Double eth0Txerrs, Double eth0Txbyts,
            Double eth0Txpcks, Double eth0Txdrops, Double eth1Rxerrs,
            Double eth1Rxbyts, Double eth1Rxpcks, Double eth1Rxdrops,
            Double eth1Txerrs, Double eth1Txbyts, Double eth1Txpcks,
            Double eth1Txdrops, Double sdaRkbs, Double sdaWkbs, Double sdbRkbs,
            Double sdbWkbs, Double sdcRkbs, Double sdcWkbs, Double sddRkbs,
            Double sddWkbs, Float cpuIdlePcnt, Float cpuNicePcnt,
            Float cpuSystemPcnt, Float cpuUserPcnt, Float cpuHirqPcnt,
            Float cpuSirqPcnt, Float iowaitPcnt, Float memBuffersPcnt,
            Float memUsedPcnt, Float eth0BusyPcnt, Float eth1BusyPcnt,
            Float sdaBusyPcnt, Float sdbBusyPcnt, Float sdcBusyPcnt,
            Float sddBusyPcnt, Float swapUsedPcnt) {
        this.timestamp = timestamp;
        this.host = host;
        this.load15 = load15;
        this.load5 = load5;
        this.load1 = load1;
        this.taskTotal = taskTotal;
        this.taskRunning = taskRunning;
        this.taskSleep = taskSleep;
        this.taskStopped = taskStopped;
        this.taskZombie = taskZombie;
        this.memTotal = memTotal;
        this.memBuffers = memBuffers;
        this.memCached = memCached;
        this.memUsed = memUsed;
        this.memFree = memFree;
        this.eth0Rxerrs = eth0Rxerrs;
        this.eth0Rxbyts = eth0Rxbyts;
        this.eth0Rxpcks = eth0Rxpcks;
        this.eth0Rxdrops = eth0Rxdrops;
        this.eth0Txerrs = eth0Txerrs;
        this.eth0Txbyts = eth0Txbyts;
        this.eth0Txpcks = eth0Txpcks;
        this.eth0Txdrops = eth0Txdrops;
        this.eth1Rxerrs = eth1Rxerrs;
        this.eth1Rxbyts = eth1Rxbyts;
        this.eth1Rxpcks = eth1Rxpcks;
        this.eth1Rxdrops = eth1Rxdrops;
        this.eth1Txerrs = eth1Txerrs;
        this.eth1Txbyts = eth1Txbyts;
        this.eth1Txpcks = eth1Txpcks;
        this.eth1Txdrops = eth1Txdrops;
        this.sdaRkbs = sdaRkbs;
        this.sdaWkbs = sdaWkbs;
        this.sdbRkbs = sdbRkbs;
        this.sdbWkbs = sdbWkbs;
        this.sdcRkbs = sdcRkbs;
        this.sdcWkbs = sdcWkbs;
        this.sddRkbs = sddRkbs;
        this.sddWkbs = sddWkbs;
        this.cpuIdlePcnt = cpuIdlePcnt;
        this.cpuNicePcnt = cpuNicePcnt;
        this.cpuSystemPcnt = cpuSystemPcnt;
        this.cpuUserPcnt = cpuUserPcnt;
        this.cpuHirqPcnt = cpuHirqPcnt;
        this.cpuSirqPcnt = cpuSirqPcnt;
        this.iowaitPcnt = iowaitPcnt;
        this.memBuffersPcnt = memBuffersPcnt;
        this.memUsedPcnt = memUsedPcnt;
        this.eth0BusyPcnt = eth0BusyPcnt;
        this.eth1BusyPcnt = eth1BusyPcnt;
        this.sdaBusyPcnt = sdaBusyPcnt;
        this.sdbBusyPcnt = sdbBusyPcnt;
        this.sdcBusyPcnt = sdcBusyPcnt;
        this.sddBusyPcnt = sddBusyPcnt;
        this.swapUsedPcnt = swapUsedPcnt;
    }

    // Plain accessors, one pair per column, in table-column order.

    public Timestamp getTimestamp() { return timestamp; }
    public void setTimestamp(Timestamp v) { timestamp = v; }

    public Integer getHost() { return host; }
    public void setHost(Integer v) { host = v; }

    public Double getLoad15() { return load15; }
    public void setLoad15(Double v) { load15 = v; }

    public Double getLoad5() { return load5; }
    public void setLoad5(Double v) { load5 = v; }

    public Double getLoad1() { return load1; }
    public void setLoad1(Double v) { load1 = v; }

    public Double getTaskTotal() { return taskTotal; }
    public void setTaskTotal(Double v) { taskTotal = v; }

    public Double getTaskRunning() { return taskRunning; }
    public void setTaskRunning(Double v) { taskRunning = v; }

    public Double getTaskSleep() { return taskSleep; }
    public void setTaskSleep(Double v) { taskSleep = v; }

    public Double getTaskStopped() { return taskStopped; }
    public void setTaskStopped(Double v) { taskStopped = v; }

    public Double getTaskZombie() { return taskZombie; }
    public void setTaskZombie(Double v) { taskZombie = v; }

    public Double getMemTotal() { return memTotal; }
    public void setMemTotal(Double v) { memTotal = v; }

    public Double getMemBuffers() { return memBuffers; }
    public void setMemBuffers(Double v) { memBuffers = v; }

    public Double getMemCached() { return memCached; }
    public void setMemCached(Double v) { memCached = v; }

    public Double getMemUsed() { return memUsed; }
    public void setMemUsed(Double v) { memUsed = v; }

    public Double getMemFree() { return memFree; }
    public void setMemFree(Double v) { memFree = v; }

    public Double getEth0Rxerrs() { return eth0Rxerrs; }
    public void setEth0Rxerrs(Double v) { eth0Rxerrs = v; }

    public Double getEth0Rxbyts() { return eth0Rxbyts; }
    public void setEth0Rxbyts(Double v) { eth0Rxbyts = v; }

    public Double getEth0Rxpcks() { return eth0Rxpcks; }
    public void setEth0Rxpcks(Double v) { eth0Rxpcks = v; }

    public Double getEth0Rxdrops() { return eth0Rxdrops; }
    public void setEth0Rxdrops(Double v) { eth0Rxdrops = v; }

    public Double getEth0Txerrs() { return eth0Txerrs; }
    public void setEth0Txerrs(Double v) { eth0Txerrs = v; }

    public Double getEth0Txbyts() { return eth0Txbyts; }
    public void setEth0Txbyts(Double v) { eth0Txbyts = v; }

    public Double getEth0Txpcks() { return eth0Txpcks; }
    public void setEth0Txpcks(Double v) { eth0Txpcks = v; }

    public Double getEth0Txdrops() { return eth0Txdrops; }
    public void setEth0Txdrops(Double v) { eth0Txdrops = v; }

    public Double getEth1Rxerrs() { return eth1Rxerrs; }
    public void setEth1Rxerrs(Double v) { eth1Rxerrs = v; }

    public Double getEth1Rxbyts() { return eth1Rxbyts; }
    public void setEth1Rxbyts(Double v) { eth1Rxbyts = v; }

    public Double getEth1Rxpcks() { return eth1Rxpcks; }
    public void setEth1Rxpcks(Double v) { eth1Rxpcks = v; }

    public Double getEth1Rxdrops() { return eth1Rxdrops; }
    public void setEth1Rxdrops(Double v) { eth1Rxdrops = v; }

    public Double getEth1Txerrs() { return eth1Txerrs; }
    public void setEth1Txerrs(Double v) { eth1Txerrs = v; }

    public Double getEth1Txbyts() { return eth1Txbyts; }
    public void setEth1Txbyts(Double v) { eth1Txbyts = v; }

    public Double getEth1Txpcks() { return eth1Txpcks; }
    public void setEth1Txpcks(Double v) { eth1Txpcks = v; }

    public Double getEth1Txdrops() { return eth1Txdrops; }
    public void setEth1Txdrops(Double v) { eth1Txdrops = v; }

    public Double getSdaRkbs() { return sdaRkbs; }
    public void setSdaRkbs(Double v) { sdaRkbs = v; }

    public Double getSdaWkbs() { return sdaWkbs; }
    public void setSdaWkbs(Double v) { sdaWkbs = v; }

    public Double getSdbRkbs() { return sdbRkbs; }
    public void setSdbRkbs(Double v) { sdbRkbs = v; }

    public Double getSdbWkbs() { return sdbWkbs; }
    public void setSdbWkbs(Double v) { sdbWkbs = v; }

    public Double getSdcRkbs() { return sdcRkbs; }
    public void setSdcRkbs(Double v) { sdcRkbs = v; }

    public Double getSdcWkbs() { return sdcWkbs; }
    public void setSdcWkbs(Double v) { sdcWkbs = v; }

    public Double getSddRkbs() { return sddRkbs; }
    public void setSddRkbs(Double v) { sddRkbs = v; }

    public Double getSddWkbs() { return sddWkbs; }
    public void setSddWkbs(Double v) { sddWkbs = v; }

    public Float getCpuIdlePcnt() { return cpuIdlePcnt; }
    public void setCpuIdlePcnt(Float v) { cpuIdlePcnt = v; }

    public Float getCpuNicePcnt() { return cpuNicePcnt; }
    public void setCpuNicePcnt(Float v) { cpuNicePcnt = v; }

    public Float getCpuSystemPcnt() { return cpuSystemPcnt; }
    public void setCpuSystemPcnt(Float v) { cpuSystemPcnt = v; }

    public Float getCpuUserPcnt() { return cpuUserPcnt; }
    public void setCpuUserPcnt(Float v) { cpuUserPcnt = v; }

    public Float getCpuHirqPcnt() { return cpuHirqPcnt; }
    public void setCpuHirqPcnt(Float v) { cpuHirqPcnt = v; }

    public Float getCpuSirqPcnt() { return cpuSirqPcnt; }
    public void setCpuSirqPcnt(Float v) { cpuSirqPcnt = v; }

    public Float getIowaitPcnt() { return iowaitPcnt; }
    public void setIowaitPcnt(Float v) { iowaitPcnt = v; }

    public Float getMemBuffersPcnt() { return memBuffersPcnt; }
    public void setMemBuffersPcnt(Float v) { memBuffersPcnt = v; }

    public Float getMemUsedPcnt() { return memUsedPcnt; }
    public void setMemUsedPcnt(Float v) { memUsedPcnt = v; }

    public Float getEth0BusyPcnt() { return eth0BusyPcnt; }
    public void setEth0BusyPcnt(Float v) { eth0BusyPcnt = v; }

    public Float getEth1BusyPcnt() { return eth1BusyPcnt; }
    public void setEth1BusyPcnt(Float v) { eth1BusyPcnt = v; }

    public Float getSdaBusyPcnt() { return sdaBusyPcnt; }
    public void setSdaBusyPcnt(Float v) { sdaBusyPcnt = v; }

    public Float getSdbBusyPcnt() { return sdbBusyPcnt; }
    public void setSdbBusyPcnt(Float v) { sdbBusyPcnt = v; }

    public Float getSdcBusyPcnt() { return sdcBusyPcnt; }
    public void setSdcBusyPcnt(Float v) { sdcBusyPcnt = v; }

    public Float getSddBusyPcnt() { return sddBusyPcnt; }
    public void setSddBusyPcnt(Float v) { sddBusyPcnt = v; }

    public Float getSwapUsedPcnt() { return swapUsedPcnt; }
    public void setSwapUsedPcnt(Float v) { swapUsedPcnt = v; }

}

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/DfsDataNode.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/DfsDataNode.java?rev=783442&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/DfsDataNode.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/rest/objects/DfsDataNode.java Wed Jun 10 18:31:05 2009
@@ -0,0 +1,340 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.rest.objects;
+
+// Generated May 28, 2009 3:39:53 PM by Hibernate Tools 3.2.4.GA
+import java.sql.Timestamp;
+
+/**
+ * DfsDataNode generated by hbm2java
+ */
/**
 * Bean holding one row of the dfs_datanode metrics table: per-datanode
 * block and byte counters plus per-operation timing metrics, keyed by the
 * sample timestamp and host name. All fields are nullable object wrappers;
 * unset fields are null.
 */
public class DfsDataNode implements java.io.Serializable {
    static final long serialVersionUID = 3356424477784496801L;

    private Timestamp timestamp;
    private String host;
    private Double blockReportsAvgTime;
    private Double blockReportsNumOps;
    private Double blockVerificationFailures;
    private Double blocksRead;
    private Double blocksRemoved;
    private Double blocksReplicated;
    private Double blocksVerified;
    private Double blocksWritten;
    private Double bytesRead;
    private Double bytesWritten;
    private Double copyBlockOpAvgTime;
    private Double copyBlockOpNumOps;
    private Double heartBeatsAvgTime;
    private Double heartBeatsNumOps;
    private Double readBlockOpAvgTime;
    private Double readBlockOpNumOps;
    private Double readMetadataOpAvgTime;
    private Double readMetadataOpNumOps;
    private Double readsFromLocalClient;
    private Double readsFromRemoteClient;
    private Double replaceBlockOpAvgTime;
    private Double replaceBlockOpNumOps;
    private Double sessionId;
    private Double writeBlockOpAvgTime;
    private Double writeBlockOpNumOps;
    private Double writesFromLocalClient;
    private Double writesFromRemoteClient;

    /** Creates an empty record; populate via setters. */
    public DfsDataNode() {
    }

    /**
     * Creates a fully populated record. Parameters follow the column order
     * of the dfs_datanode table.
     *
     * BUG FIX: the first parameter was previously misspelled
     * {@code timestampe}, so {@code this.timestamp = timestamp} resolved to
     * a field self-assignment and the caller's timestamp was silently
     * dropped (the field stayed null). The parameter is now named
     * {@code timestamp} so the assignment takes effect.
     */
    public DfsDataNode(Timestamp timestamp, String host,
		       Double blockReportsAvgTime, Double blockReportsNumOps,
		       Double blockVerificationFailures, Double blocksRead,
		       Double blocksRemoved, Double blocksReplicated,
		       Double blocksVerified, Double blocksWritten, Double bytesRead,
		       Double bytesWritten, Double copyBlockOpAvgTime,
		       Double copyBlockOpNumOps, Double heartBeatsAvgTime,
		       Double heartBeatsNumOps, Double readBlockOpAvgTime,
		       Double readBlockOpNumOps, Double readMetadataOpAvgTime,
		       Double readMetadataOpNumOps, Double readsFromLocalClient,
		       Double readsFromRemoteClient, Double replaceBlockOpAvgTime,
		       Double replaceBlockOpNumOps, Double sessionId,
		       Double writeBlockOpAvgTime, Double writeBlockOpNumOps,
		       Double writesFromLocalClient, Double writesFromRemoteClient) {
	this.timestamp = timestamp;
	this.host = host;
	this.blockReportsAvgTime = blockReportsAvgTime;
	this.blockReportsNumOps = blockReportsNumOps;
	this.blockVerificationFailures = blockVerificationFailures;
	this.blocksRead = blocksRead;
	this.blocksRemoved = blocksRemoved;
	this.blocksReplicated = blocksReplicated;
	this.blocksVerified = blocksVerified;
	this.blocksWritten = blocksWritten;
	this.bytesRead = bytesRead;
	this.bytesWritten = bytesWritten;
	this.copyBlockOpAvgTime = copyBlockOpAvgTime;
	this.copyBlockOpNumOps = copyBlockOpNumOps;
	this.heartBeatsAvgTime = heartBeatsAvgTime;
	this.heartBeatsNumOps = heartBeatsNumOps;
	this.readBlockOpAvgTime = readBlockOpAvgTime;
	this.readBlockOpNumOps = readBlockOpNumOps;
	this.readMetadataOpAvgTime = readMetadataOpAvgTime;
	this.readMetadataOpNumOps = readMetadataOpNumOps;
	this.readsFromLocalClient = readsFromLocalClient;
	this.readsFromRemoteClient = readsFromRemoteClient;
	this.replaceBlockOpAvgTime = replaceBlockOpAvgTime;
	this.replaceBlockOpNumOps = replaceBlockOpNumOps;
	this.sessionId = sessionId;
	this.writeBlockOpAvgTime = writeBlockOpAvgTime;
	this.writeBlockOpNumOps = writeBlockOpNumOps;
	this.writesFromLocalClient = writesFromLocalClient;
	this.writesFromRemoteClient = writesFromRemoteClient;
    }

    // Plain accessors, one pair per column, in table-column order.

    public Timestamp getTimestamp() { return timestamp; }
    public void setTimestamp(Timestamp timestamp) { this.timestamp = timestamp; }

    public String getHost() { return host; }
    public void setHost(String host) { this.host = host; }

    public Double getBlockReportsAvgTime() { return blockReportsAvgTime; }
    public void setBlockReportsAvgTime(Double v) { blockReportsAvgTime = v; }

    public Double getBlockReportsNumOps() { return blockReportsNumOps; }
    public void setBlockReportsNumOps(Double v) { blockReportsNumOps = v; }

    public Double getBlockVerificationFailures() { return blockVerificationFailures; }
    public void setBlockVerificationFailures(Double v) { blockVerificationFailures = v; }

    public Double getBlocksRead() { return blocksRead; }
    public void setBlocksRead(Double v) { blocksRead = v; }

    public Double getBlocksRemoved() { return blocksRemoved; }
    public void setBlocksRemoved(Double v) { blocksRemoved = v; }

    public Double getBlocksReplicated() { return blocksReplicated; }
    public void setBlocksReplicated(Double v) { blocksReplicated = v; }

    public Double getBlocksVerified() { return blocksVerified; }
    public void setBlocksVerified(Double v) { blocksVerified = v; }

    public Double getBlocksWritten() { return blocksWritten; }
    public void setBlocksWritten(Double v) { blocksWritten = v; }

    public Double getBytesRead() { return bytesRead; }
    public void setBytesRead(Double v) { bytesRead = v; }

    public Double getBytesWritten() { return bytesWritten; }
    public void setBytesWritten(Double v) { bytesWritten = v; }

    public Double getCopyBlockOpAvgTime() { return copyBlockOpAvgTime; }
    public void setCopyBlockOpAvgTime(Double v) { copyBlockOpAvgTime = v; }

    public Double getCopyBlockOpNumOps() { return copyBlockOpNumOps; }
    public void setCopyBlockOpNumOps(Double v) { copyBlockOpNumOps = v; }

    public Double getHeartBeatsAvgTime() { return heartBeatsAvgTime; }
    public void setHeartBeatsAvgTime(Double v) { heartBeatsAvgTime = v; }

    public Double getHeartBeatsNumOps() { return heartBeatsNumOps; }
    public void setHeartBeatsNumOps(Double v) { heartBeatsNumOps = v; }

    public Double getReadBlockOpAvgTime() { return readBlockOpAvgTime; }
    public void setReadBlockOpAvgTime(Double v) { readBlockOpAvgTime = v; }

    public Double getReadBlockOpNumOps() { return readBlockOpNumOps; }
    public void setReadBlockOpNumOps(Double v) { readBlockOpNumOps = v; }

    public Double getReadMetadataOpAvgTime() { return readMetadataOpAvgTime; }
    public void setReadMetadataOpAvgTime(Double v) { readMetadataOpAvgTime = v; }

    public Double getReadMetadataOpNumOps() { return readMetadataOpNumOps; }
    public void setReadMetadataOpNumOps(Double v) { readMetadataOpNumOps = v; }

    public Double getReadsFromLocalClient() { return readsFromLocalClient; }
    public void setReadsFromLocalClient(Double v) { readsFromLocalClient = v; }

    public Double getReadsFromRemoteClient() { return readsFromRemoteClient; }
    public void setReadsFromRemoteClient(Double v) { readsFromRemoteClient = v; }

    public Double getReplaceBlockOpAvgTime() { return replaceBlockOpAvgTime; }
    public void setReplaceBlockOpAvgTime(Double v) { replaceBlockOpAvgTime = v; }

    public Double getReplaceBlockOpNumOps() { return replaceBlockOpNumOps; }
    public void setReplaceBlockOpNumOps(Double v) { replaceBlockOpNumOps = v; }

    public Double getSessionId() { return sessionId; }
    public void setSessionId(Double v) { sessionId = v; }

    public Double getWriteBlockOpAvgTime() { return writeBlockOpAvgTime; }
    public void setWriteBlockOpAvgTime(Double v) { writeBlockOpAvgTime = v; }

    public Double getWriteBlockOpNumOps() { return writeBlockOpNumOps; }
    public void setWriteBlockOpNumOps(Double v) { writeBlockOpNumOps = v; }

    public Double getWritesFromLocalClient() { return writesFromLocalClient; }
    public void setWritesFromLocalClient(Double v) { writesFromLocalClient = v; }

    public Double getWritesFromRemoteClient() { return writesFromRemoteClient; }
    public void setWritesFromRemoteClient(Double v) { writesFromRemoteClient = v; }

}



Mime
View raw message