hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r389910 - in /lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs: DFSShell.java DataNodeReport.java DistributedFileSystem.java
Date Wed, 29 Mar 2006 22:20:22 GMT
Author: cutting
Date: Wed Mar 29 14:20:19 2006
New Revision: 389910

URL: http://svn.apache.org/viewcvs?rev=389910&view=rev
Log:
Fix HADOOP-67.  Add a public API for dfs statistics.  Also switch to use the public API for
reporting in DFSShell.

Added:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNodeReport.java
Modified:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?rev=389910&r1=389909&r2=389910&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Wed Mar 29 14:20:19 2006
@@ -158,7 +158,7 @@
     /**
      * Return an abbreviated English-language desc of the byte length
      */
-    String byteDesc(long len) {
+    static String byteDesc(long len) {
         double val = 0.0;
         String ending = "";
         if (len < 1024 * 1024) {
@@ -174,7 +174,7 @@
         return limitDecimal(val, 2) + ending;
     }
 
-    String limitDecimal(double d, int placesAfterDecimal) {
+    static String limitDecimal(double d, int placesAfterDecimal) {
         String strVal = Double.toString(d);
         int decpt = strVal.indexOf(".");
         if (decpt >= 0) {
@@ -187,42 +187,28 @@
      * Gives a report on how the FileSystem is doing
      */
     public void report() throws IOException {
-        if (fs instanceof DistributedFileSystem) {
-            DistributedFileSystem dfsfs = (DistributedFileSystem) fs;
-            DFSClient dfs = dfsfs.getClient();
-            long total = dfs.totalRawCapacity();
-            long used = dfs.totalRawUsed();
-            DatanodeInfo info[] = dfs.datanodeReport();
+      if (fs instanceof DistributedFileSystem) {
+        DistributedFileSystem dfs = (DistributedFileSystem)fs;
+        long raw = dfs.getRawCapacity();
+        long rawUsed = dfs.getRawUsed();
+        long used = dfs.getUsed();
 
-            long totalEffectiveBytes = 0;
-            File topItems[] = fs.listFiles(new File("/"));
-            for (int i = 0; i < topItems.length; i++) {
-                DFSFile cur = (DFSFile) topItems[i];
-                totalEffectiveBytes += cur.getContentsLength();
-            }
+        System.out.println("Total raw bytes: " + raw + " (" + byteDesc(raw) + ")");
+        System.out.println("Used raw bytes: " + rawUsed + " (" + byteDesc(rawUsed) + ")");
+        System.out.println("% used: " + limitDecimal(((1.0 * rawUsed) / raw) * 100, 2) + "%");
+        System.out.println();
+        System.out.println("Total effective bytes: " + used + " (" + byteDesc(used) + ")");
+        System.out.println("Effective replication multiplier: " + (1.0 * rawUsed / used));
 
-            System.out.println("Total raw bytes: " + total + " (" + byteDesc(total) + ")");
-            System.out.println("Used raw bytes: " + used + " (" + byteDesc(used) + ")");
-            System.out.println("% used: " + limitDecimal(((1.0 * used) / total) * 100, 2) + "%");
-            System.out.println();
-            System.out.println("Total effective bytes: " + totalEffectiveBytes + " (" + byteDesc(totalEffectiveBytes) + ")");
-            System.out.println("Effective replication multiplier: " + (1.0 * used / totalEffectiveBytes));
-
-            System.out.println("-------------------------------------------------");
-            System.out.println("Datanodes available: " + info.length);
-            System.out.println();
-            for (int i = 0; i < info.length; i++) {
-                System.out.println("Name: " + info[i].getName().toString());
-                long c = info[i].getCapacity();
-                long r = info[i].getRemaining();
-                long u = c - r;
-                System.out.println("Total raw bytes: " + c + " (" + byteDesc(c) + ")");
-                System.out.println("Used raw bytes: " + u + " (" + byteDesc(u) + ")");
-                System.out.println("% used: " + limitDecimal(((1.0 * u) / c) * 100, 2) + "%");
-                System.out.println("Last contact with namenode: " + new Date(info[i].lastUpdate()));
-                System.out.println();
-            }
+        System.out.println("-------------------------------------------------");
+        DataNodeReport info[] = dfs.getDataNodeStats();
+        System.out.println("Datanodes available: " + info.length);
+        System.out.println();
+        for (int i = 0; i < info.length; i++) {
+          System.out.println(info[i]);
+          System.out.println();
         }
+      }
     }
 
     /**

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNodeReport.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNodeReport.java?rev=389910&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNodeReport.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNodeReport.java Wed Mar 29 14:20:19
2006
@@ -0,0 +1,46 @@
+package org.apache.hadoop.dfs;
+
+import java.util.Date;
+
+import org.apache.hadoop.io.UTF8;
+
+/** A report on the status of a DataNode.
+ *
+ * @see DistributedFileSystem#getDataNodeStats
+ */
+public class DataNodeReport {
+  String name;
+  String host;
+  long capacity;
+  long remaining;
+  long lastUpdate;
+  
+  /** The name of the datanode. */
+  public String getName() { return name; }
+
+  /** The hostname of the datanode. */
+  public String getHost() { return host; }
+
+  /** The raw capacity. */
+  public long getCapacity() { return capacity; }
+
+  /** The raw free space. */
+  public long getRemaining() { return remaining; }
+
+  /** The time when this information was accurate. */
+  public long getLastUpdate() { return lastUpdate; }
+
+  public String toString() {
+    StringBuffer buffer = new StringBuffer();
+    long c = getCapacity();
+    long r = getRemaining();
+    long u = c - r;
+    buffer.append("Name: "+name+"\n");
+    buffer.append("Total raw bytes: "+c+" ("+DFSShell.byteDesc(c)+")"+"\n");
+    buffer.append("Used raw bytes: "+u+" ("+DFSShell.byteDesc(u)+")"+"\n");
+    buffer.append("% used: "+DFSShell.limitDecimal(((1.0*u)/c)*100,2)+"%"+"\n");
+    buffer.append("Last contact: "+new Date(lastUpdate)+"\n");
+    return buffer.toString();
+  }
+
+}

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java?rev=389910&r1=389909&r2=389910&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java Wed Mar
29 14:20:19 2006
@@ -299,4 +299,41 @@
       return dfs.BLOCK_SIZE;
     }
 
+    /** Return the total raw capacity of the filesystem, disregarding
+     * replication .*/
+    public long getRawCapacity() throws IOException{
+        return dfs.totalRawCapacity();
+    }
+
+    /** Return the total raw used space in the filesystem, disregarding
+     * replication .*/
+    public long getRawUsed() throws IOException{
+        return dfs.totalRawUsed();
+    }
+
+    /** Return the total size of all files in the filesystem.*/
+    public long getUsed()throws IOException{
+        long used = 0;
+        DFSFileInfo dfsFiles[] = dfs.listFiles(getPath(new File("/")));
+        for(int i=0;i<dfsFiles.length;i++){
+            used += dfsFiles[i].getContentsLen();
+        }
+        return used;
+    }
+
+    /** Return statistics for each datanode.*/
+    public DataNodeReport[] getDataNodeStats() throws IOException {
+      DatanodeInfo[]  dnReport = dfs.datanodeReport();
+      DataNodeReport[] reports = new DataNodeReport[dnReport.length];
+
+      for (int i = 0; i < dnReport.length; i++) {
+        reports[i] = new DataNodeReport();
+        reports[i].name = dnReport[i].getName().toString();
+        reports[i].host = dnReport[i].getHost().toString();
+        reports[i].capacity = dnReport[i].getCapacity();
+        reports[i].remaining = dnReport[i].getRemaining();
+        reports[i].lastUpdate = dnReport[i].lastUpdate();
+      }
+      return reports;
+    }
 }



Mime
View raw message