chukwa-commits mailing list archives

From: ey...@apache.org
Subject: svn commit: r1137280 - in /incubator/chukwa/trunk: ./ ivy/ lib/ src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/ src/java/org/apache/hadoop/chukwa/datastore/ src/java/org/apache/hadoop/chukwa/hicc/rest/ src/web/hicc/jsp/
Date: Sun, 19 Jun 2011 03:50:09 GMT
Author: eyang
Date: Sun Jun 19 03:50:08 2011
New Revision: 1137280

URL: http://svn.apache.org/viewvc?rev=1137280&view=rev
Log:
CHUKWA-586. Updated HBase dependency to HBase 0.90.3. (Eric Yang)
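
Most of the source changes below follow from the HBase 0.90.x client API: HBaseConfiguration is no longer constructed directly but obtained from a factory that returns a plain Hadoop Configuration, and HTablePool hands out HTableInterface handles instead of HTable. A minimal sketch of the new setup (illustrative only; it assumes the HBase 0.90.3 client on the classpath and reuses the pool size of 60 and the "SystemMetrics" table name that appear elsewhere in this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.HTablePool;

    public class HBaseSetupSketch {
      public static void main(String[] args) {
        // 0.20.x style (removed in this commit): hconf = new HBaseConfiguration();
        // 0.90.x style: factory method that returns org.apache.hadoop.conf.Configuration.
        Configuration hconf = HBaseConfiguration.create();

        // Tables are checked out of a pool and handed back when done.
        HTablePool pool = new HTablePool(hconf, 60);
        HTableInterface table = pool.getTable("SystemMetrics");
        try {
          // ... issue Gets/Puts/Scans against the table ...
        } finally {
          pool.putTable(table);
        }
      }
    }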

Removed:
    incubator/chukwa/trunk/lib/hbase-0.20.6-test.jar
    incubator/chukwa/trunk/lib/hbase-0.20.6.jar
Modified:
    incubator/chukwa/trunk/CHANGES.txt
    incubator/chukwa/trunk/ivy.xml
    incubator/chukwa/trunk/ivy/libraries.properties
    incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java
    incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
    incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
    incubator/chukwa/trunk/src/web/hicc/jsp/graph_explorer.jsp

Modified: incubator/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/CHANGES.txt?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/CHANGES.txt (original)
+++ incubator/chukwa/trunk/CHANGES.txt Sun Jun 19 03:50:08 2011
@@ -32,6 +32,8 @@ Trunk (unreleased changes)
 
   IMPROVEMENTS
 
+    CHUKWA-586. Updated HBase dependency to HBase 0.90.3. (Eric Yang)
+
     CHUKWA-569. Added temperory fix for load meta data faster for graph_explorer.jsp. (Eric Yang)
 
     CHUKWA-573. Setup ChukwaMetrics to send to port 9097 and use ChukwaMetricsProcessor for parsing. (Eric Yang)

Modified: incubator/chukwa/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/ivy.xml?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/ivy.xml (original)
+++ incubator/chukwa/trunk/ivy.xml Sun Jun 19 03:50:08 2011
@@ -53,25 +53,25 @@
     <artifact conf="master"/>
   </publications>
   <dependencies>
-    <dependency org="org.apache.hadoop"
+<!--    <dependency org="org.apache.hadoop"
       name="hadoop-core"
       rev="${hadoop.version}"
       conf="common->master"/>
     <dependency org="org.apache.hadoop"
       name="hadoop-test"
       rev="${hadoop.version}"
-      conf="common->master"/>
+      conf="common->master"/> -->
     <dependency org="com.googlecode.json-simple"
       name="json-simple"
       rev="${json-simple.version}"
       conf="common->master"/>
-<!--    <dependency org="org.apache.hbase"
+    <dependency org="org.apache.hbase"
       name="hbase"
       rev="${hbase.version}"
       conf="common->master">
       <artifact name="hbase" type="jar"/>
       <artifact name="hbase" type="tests" ext="jar" m:classifier="tests"/>
-    </dependency> -->
+    </dependency>
     <dependency org="com.google.guava"
       name="guava"
       rev="${guava.version}"

Modified: incubator/chukwa/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/ivy/libraries.properties?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/ivy/libraries.properties (original)
+++ incubator/chukwa/trunk/ivy/libraries.properties Sun Jun 19 03:50:08 2011
@@ -15,7 +15,7 @@
 
 
 #This properties file lists the versions of the various artifacts used by chukwa.
-hadoop.version=0.20.2
+#hadoop.version=0.20.3
 
 #These are the versions of our dependencies
 activemq.version=5.3.1
@@ -50,7 +50,7 @@ jdiff.version=1.0.9
 xmlenc.version=0.52
 xerces.version=1.4.4
 #jaxb.version=2.1.12
-hbase.version=0.21.0-SNAPSHOT
+hbase.version=0.90.3
 thrift.version=0.2.0
 zookeeper.version=3.2.2
 jersey.version=1.1.5.1

Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java Sun Jun 19 03:50:08 2011
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
 
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.OutputCollector;
@@ -64,7 +65,7 @@ public class HBaseWriter extends Pipelin
       "chukwa.demux.mapper.default.processor",
       "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.DefaultProcessor");
   private HTablePool pool;
-  private HBaseConfiguration hconf;
+  private Configuration hconf;
   
   private class StatReportingTask extends TimerTask {
     private long lastTs = System.currentTimeMillis();
@@ -93,10 +94,10 @@ public class HBaseWriter extends Pipelin
     this.reportStats = reportStats;
     statTimer = new Timer();
     /* HBase Version 0.20.x */
-    hconf = new HBaseConfiguration();
+    //hconf = new HBaseConfiguration();
     
     /* HBase Version 0.89.x */
-    //hconf = HBaseConfiguration.create();
+    hconf = HBaseConfiguration.create();
   }
 
   public HBaseWriter(ChukwaConfiguration conf, HBaseConfiguration hconf) {
@@ -197,7 +198,7 @@ public class HBaseWriter extends Pipelin
               }
             }
             if(table!=null) {
-              HTable hbase = pool.getTable(table.name());  
+              HTableInterface hbase = pool.getTable(table.name().getBytes());  
               processor.process(new ChukwaArchiveKey(), chunk, output, reporter);
               hbase.put(output.getKeyValues());
               pool.putTable(hbase);
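
The HBaseWriter change above is mechanical: the pooled handle is now an HTableInterface and the table name is passed as bytes. For orientation, a hedged sketch of a single put through the pooled handle under 0.90.x; the row key, qualifier, and value are made up for illustration, while the "SystemMetrics" table and "system" family are names used elsewhere in this commit:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.HTablePool;
    import org.apache.hadoop.hbase.client.Put;

    public class PutSketch {
      public static void main(String[] args) throws IOException {
        Configuration hconf = HBaseConfiguration.create();
        HTablePool pool = new HTablePool(hconf, 60);
        // getTable also accepts a byte[] name, as HBaseWriter uses above.
        HTableInterface table = pool.getTable("SystemMetrics".getBytes());
        try {
          Put put = new Put("1308455408000-host1.example.com".getBytes()); // hypothetical row key
          put.add("system".getBytes(), "csource".getBytes(), "host1.example.com".getBytes());
          table.put(put);
        } finally {
          pool.putTable(table); // return the handle to the pool
        }
      }
    }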

Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java Sun Jun 19 03:50:08 2011
@@ -15,12 +15,13 @@ import org.apache.hadoop.chukwa.hicc.bea
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -32,19 +33,22 @@ import org.apache.log4j.Logger;
 import org.mortbay.log.Log;
 
 public class ChukwaHBaseStore {
-  private static HBaseConfiguration hconf = hconf = new HBaseConfiguration();
+  private static Configuration hconf = HBaseConfiguration.create();
   private static HTablePool pool = new HTablePool(hconf, 60);
   static Logger log = Logger.getLogger(ChukwaHBaseStore.class);
   
-  public static Series getSeries(String tableName, String rkey, String column,
+  public static Series getSeries(String tableName, String rkey, String family, String column,
       long startTime, long endTime, boolean filterByRowKey) {
     StringBuilder seriesName = new StringBuilder();
     seriesName.append(rkey);
     seriesName.append(":");
+    seriesName.append(family);
+    seriesName.append(":");
     seriesName.append(column);
+
     Series series = new Series(seriesName.toString());
     try {
-      HTable table = pool.getTable(tableName);
+      HTableInterface table = pool.getTable(tableName);
       Calendar c = Calendar.getInstance();
       c.setTimeInMillis(startTime);
       c.set(Calendar.MINUTE, 0);
@@ -52,7 +56,7 @@ public class ChukwaHBaseStore {
       c.set(Calendar.MILLISECOND, 0);
       String startRow = c.getTimeInMillis()+rkey;
       Scan scan = new Scan();
-      scan.addColumn(column.getBytes());
+      scan.addColumn(family.getBytes(), column.getBytes());
       scan.setStartRow(startRow.getBytes());
       scan.setTimeRange(startTime, endTime);
       scan.setMaxVersions();
@@ -68,7 +72,7 @@ public class ChukwaHBaseStore {
       // size to 1000 data points for graphing optimization. (i.e jwave)
       while(it.hasNext()) {
         Result result = it.next();
-        String temp = new String(result.getCellValue().getValue());
+        String temp = new String(result.getValue(family.getBytes(), column.getBytes()));
         double value = Double.parseDouble(temp);
        // TODO: Pig Store function does not honor HBase timestamp, hence need to parse rowKey for timestamp.
         String buf = new String(result.getRow());
@@ -88,7 +92,7 @@ public class ChukwaHBaseStore {
   public static Set<String> getFamilyNames(String tableName) {
     Set<String> familyNames = new CopyOnWriteArraySet<String>();
     try {
-      HTable table = pool.getTable(tableName);
+      HTableInterface table = pool.getTable(tableName);
       Calendar c = Calendar.getInstance();
       Set<byte[]> families = table.getTableDescriptor().getFamiliesKeys();
       for(byte[] name : families) {
@@ -120,14 +124,14 @@ public class ChukwaHBaseStore {
     Result result = it.next();
     List<KeyValue> kvList = result.list();
     for(KeyValue kv : kvList) {
-      columnNames.add(new String(kv.getColumn()));
+      columnNames.add(new String(kv.getQualifier()));
     }
   }
   
  public static Set<String> getColumnNames(String tableName, String family, long startTime, long endTime, boolean fullScan) {
     Set<String> columnNames = new CopyOnWriteArraySet<String>();
     try {
-      HTable table = pool.getTable(tableName);
+      HTableInterface table = pool.getTable(tableName);
       Scan scan = new Scan();
       if(!fullScan) {
         // Take sample columns of the starting time.
@@ -163,12 +167,12 @@ public class ChukwaHBaseStore {
     return columnNames;
   }
   
-  public static Set<String> getRowNames(String tableName, String column, long startTime, long endTime, boolean fullScan) {
+  public static Set<String> getRowNames(String tableName, String family, String qualifier, long startTime, long endTime, boolean fullScan) {
     Set<String> rows = new HashSet<String>();
-    HTable table = pool.getTable(tableName);
+    HTableInterface table = pool.getTable(tableName);
     try {
       Scan scan = new Scan();
-      scan.addColumn(column.getBytes());
+      scan.addColumn(family.getBytes(), qualifier.getBytes());
       if(!fullScan) {
         StringBuilder temp = new StringBuilder();
         temp.append(startTime);
@@ -203,24 +207,25 @@ public class ChukwaHBaseStore {
   }
   
  public static Set<String> getHostnames(String cluster, long startTime, long endTime, boolean fullScan) {
-    return getRowNames("SystemMetrics","system:csource", startTime, endTime, fullScan);
+    return getRowNames("SystemMetrics","system", "csource", startTime, endTime, fullScan);
   }
   
   public static Set<String> getClusterNames(long startTime, long endTime) {
     String tableName = "SystemMetrics";
-    String column = "system:ctags";
+    String family = "system";
+    String column = "ctags";
     Set<String> clusters = new HashSet<String>();
-    HTable table = pool.getTable(tableName);
+    HTableInterface table = pool.getTable(tableName);
     Pattern p = Pattern.compile("\\s*cluster=\"(.*?)\"");
     try {
       Scan scan = new Scan();
-      scan.addColumn(column.getBytes());
+      scan.addColumn(family.getBytes(), column.getBytes());
       scan.setTimeRange(startTime, endTime);
       ResultScanner results = table.getScanner(scan);
       Iterator<Result> it = results.iterator();
       while(it.hasNext()) {
         Result result = it.next();
-        String buffer = new String(result.getValue(column.getBytes()));
+        String buffer = new String(result.getValue(family.getBytes(), column.getBytes()));
         Matcher m = p.matcher(buffer);
         if(m.matches()) {
           clusters.add(m.group(1));
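
The ChukwaHBaseStore changes above all reduce to the same 0.90.x idiom: a column is addressed by a separate family and qualifier rather than a combined "family:column" byte array, both when building the Scan and when reading values out of a Result. A hedged sketch using the SystemMetrics/system/csource names from this diff (the time range and printed output are purely illustrative):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.HTablePool;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;

    public class ScanSketch {
      public static void main(String[] args) throws IOException {
        Configuration hconf = HBaseConfiguration.create();
        HTablePool pool = new HTablePool(hconf, 60);
        HTableInterface table = pool.getTable("SystemMetrics");
        try {
          Scan scan = new Scan();
          // 0.20.x: scan.addColumn("system:csource".getBytes());
          // 0.90.x: family and qualifier are passed as separate arguments.
          scan.addColumn("system".getBytes(), "csource".getBytes());
          scan.setTimeRange(0L, System.currentTimeMillis()); // illustrative range
          ResultScanner results = table.getScanner(scan);
          for (Result result : results) {
            // getValue likewise takes family and qualifier in 0.90.x.
            byte[] value = result.getValue("system".getBytes(), "csource".getBytes());
            System.out.println(new String(result.getRow()) + " -> " + new String(value));
          }
          results.close();
        } finally {
          pool.putTable(table);
        }
      }
    }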

Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java Sun Jun 19 03:50:08 2011
@@ -29,9 +29,9 @@ public class MetricsController {
   private static Logger log = Logger.getLogger(MetricsController.class);
 
   @GET
-  @Path("series/{table}/{column}/rowkey/{rkey}")
+  @Path("series/{table}/{family}/{column}/rowkey/{rkey}")
   @Produces("application/json")
-  public String getSeries(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("column") String column, @PathParam("rkey") String rkey, @QueryParam("start") String start, @QueryParam("end") String end) {
+  public String getSeries(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("family") String family, @PathParam("column") String column, @PathParam("rkey") String rkey, @QueryParam("start") String start, @QueryParam("end") String end) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
     String buffer = "";
     Series series;
@@ -50,7 +50,7 @@ public class MetricsController {
         endTime = time.getEndTime();
       }
       if(rkey!=null) {
-        series = ChukwaHBaseStore.getSeries(table, rkey, column, startTime, endTime, true);
+        series = ChukwaHBaseStore.getSeries(table, rkey, family, column, startTime, endTime, true);
         buffer = series.toString();
       } else {
         throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
@@ -73,6 +73,8 @@ public class MetricsController {
     long startTime = 0;
     long endTime = 0;
     TimeHandler time = new TimeHandler(request);
+    String family = column.split(":")[0];
+    String qualifier = column.split(":")[1];
     try {
       if(start!=null) {
         startTime = sdf.parse(start).getTime();
@@ -89,7 +91,7 @@ public class MetricsController {
           String[] rkeys = (session.getAttribute(skey).toString()).split(",");
           JSONArray seriesList = new JSONArray();
           for(String rowKey : rkeys) {
-            Series output = ChukwaHBaseStore.getSeries(table, rowKey, column, startTime, endTime, true);
+            Series output = ChukwaHBaseStore.getSeries(table, rowKey, family, qualifier, startTime, endTime, true);
             seriesList.add(output.toJSONObject());
           }
           buffer = seriesList.toString();
@@ -162,7 +164,7 @@ public class MetricsController {
   }
   
   @GET
-  @Path("rowkey/{table}/{column}")
+  @Path("rowkey/{table}/{family}/{column}")
   @Produces("application/json")
  public String getRowNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @PathParam("column") String column, @QueryParam("start") String start, @QueryParam("end") String end, @QueryParam("fullScan") @DefaultValue("false") boolean fullScan) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
@@ -186,7 +188,7 @@ public class MetricsController {
       throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
           .entity("Start/End date parse error.  Format: yyyyMMddHHmmss.").build());     

     }
-    Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, column, startTime, endTime, fullScan);
+    Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, family, column, startTime, endTime, fullScan);
     JSONArray rows = new JSONArray();
     for(String row : columnNames) {
       rows.add(row);
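
With the new {family} path segment, the HICC metrics URLs served by MetricsController gain one component. Illustrative requests (the row key and timestamps are made up; the table, family, and column names appear in this commit, and start/end use the yyyyMMddHHmmss format the controller parses):

    GET /hicc/v1/metrics/series/SystemMetrics/system/csource/rowkey/<rkey>?start=20110619000000&end=20110619035000
    GET /hicc/v1/metrics/rowkey/SystemMetrics/system/csource?start=20110619000000&end=20110619035000

Callers that previously supplied a combined "family:column" segment now pass the two pieces as separate path segments, which is what the graph_explorer.jsp change below does on the client side.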

Modified: incubator/chukwa/trunk/src/web/hicc/jsp/graph_explorer.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/web/hicc/jsp/graph_explorer.jsp?rev=1137280&r1=1137279&r2=1137280&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/web/hicc/jsp/graph_explorer.jsp (original)
+++ incubator/chukwa/trunk/src/web/hicc/jsp/graph_explorer.jsp Sun Jun 19 03:50:08 2011
@@ -112,10 +112,11 @@
       var size = $('#row option').size();
       $('#row').find('option').remove();
       var table = $('#table').val();
+      var family = $('#family').val();
       var column = $('#column').val();
       $('#column :selected').each(function(i, selected) {
         var column = $(selected).val();
-        var url = encodeURI("/hicc/v1/metrics/rowkey/"+table+"/"+column);
+        var url = encodeURI("/hicc/v1/metrics/rowkey/"+table+"/"+family+"/"+column);
         $.ajax({ url: url, dataType: "json", success: function(data){
           for(var i in data) {
             $('#row').not(":contains('"+data[i]+"')").append("<option>"+data[i]+"</option>");
@@ -129,13 +130,14 @@
       if(test == null) {
         $('#row option:eq(0)').attr('selected',true);
       }
+      var family = $("#family").val();
       var data = [];
       $('#column :selected').each(function(i, selected) {
         data[i] = $(selected).val();
       });
       var url = [];
       for(var i in data) {
-        url[i] = encodeURI("/hicc/v1/metrics/series/" + $('#table').val() + "/" + data[i] + "/rowkey/" + $('#row').val());
+        url[i] = encodeURI("/hicc/v1/metrics/series/" + $('#table').val() + "/" + family + "/" + data[i] + "/rowkey/" + $('#row').val());
       } 
       var title = $('#title').val();
       var ymin = $('#ymin').val();


