hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r1081840 [1/3] - in /hadoop/common/branches/branch-0.20-security-203: ./ src/core/org/apache/hadoop/classification/ src/core/org/apache/hadoop/log/ src/core/org/apache/hadoop/log/metrics/ src/core/org/apache/hadoop/metrics/ src/core/org/apa...
Date Tue, 15 Mar 2011 16:27:20 GMT
Author: omalley
Date: Tue Mar 15 16:27:19 2011
New Revision: 1081840

URL: http://svn.apache.org/viewvc?rev=1081840&view=rev
Log:
HADOOP-7190. Add metrics v1 back for backwards compatibility. (omalley)

Added:
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceAudience.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceStability.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/EventCounter.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ContextFactory.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsException.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsRecord.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsServlet.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsUtil.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/Updater.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/FileContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/package.html
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/package.html
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/EventCounter.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/package.html
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/CompositeContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/MetricValue.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/NullContext.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/OutputRecord.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/Util.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/spi/package.html
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MBeanUtil.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsBase.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsIntValue.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsLongValue.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsRegistry.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/util/package-info.java
Modified:
    hadoop/common/branches/branch-0.20-security-203/CHANGES.txt
    hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/EventCounter.java

Modified: hadoop/common/branches/branch-0.20-security-203/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt?rev=1081840&r1=1081839&r2=1081840&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security-203/CHANGES.txt Tue Mar 15 16:27:19 2011
@@ -2,6 +2,8 @@ Hadoop Change Log
 
 Release 0.20.203.0 - unreleased
 
+    HADOOP-7190. Add metrics v1 back for backwards compatibility. (omalley)
+
     MAPREDUCE-2360. Remove stripping of scheme, authority from submit dir in 
     support of viewfs. (cdouglas)
     

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceAudience.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceAudience.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceAudience.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceAudience.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.classification;
+
+import java.lang.annotation.Documented;
+
+/**
+ * Annotation to inform users of a package, class or method's intended audience.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InterfaceAudience {
+  /**
+   * Intended for use by any project or application.
+   */
+  @Documented public @interface Public {};
+  
+  /**
+   * Intended only for the project(s) specified in the annotation.
+   * For example, "Common", "HDFS", "MapReduce", "ZooKeeper", "HBase".
+   */
+  @Documented public @interface LimitedPrivate {
+    String[] value();
+  };
+  
+  /**
+   * Intended for use only within Hadoop itself.
+   */
+  @Documented public @interface Private {};
+
+  private InterfaceAudience() {} // Audience can't exist on its own
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceStability.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceStability.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceStability.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/classification/InterfaceStability.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.classification;
+
+import java.lang.annotation.Documented;
+
+/**
+ * Annotation to inform users of how much to rely on a particular package,
+ * class or method not changing over time.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InterfaceStability {
+  /**
+   * Can evolve while retaining compatibility across minor release boundaries;
+   * can break compatibility only at a major release (i.e. at m.0).
+   */
+  @Documented
+  public @interface Stable {};
+  
+  /**
+   * Evolving, but can break compatibility at minor release (i.e. m.x)
+   */
+  @Documented
+  public @interface Evolving {};
+  
+  /**
+   * No guarantee is provided as to reliability or stability across any
+   * level of release granularity.
+   */
+  @Documented
+  public @interface Unstable {};
+}

Modified: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/EventCounter.java?rev=1081840&r1=1081839&r2=1081840&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/EventCounter.java (original)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/EventCounter.java Tue Mar 15 16:27:19 2011
@@ -17,76 +17,18 @@
  */
 package org.apache.hadoop.log;
 
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.spi.LoggingEvent;
-
 /**
  * A log4J Appender that simply counts logging events in three levels:
- * fatal, error and warn.
+ * fatal, error and warn. The class name is used in log4j.properties
+ * @deprecated use {@link org.apache.hadoop.log.metrics.EventCounter} instead
  */
-public class EventCounter extends AppenderSkeleton {
-        
-    private static final int FATAL = 0;
-    private static final int ERROR = 1;
-    private static final int WARN  = 2;
-    private static final int INFO  = 3;
-    
-    private static class EventCounts {
-        private final long[] counts = { 0, 0, 0, 0 };
-    
-        private synchronized void incr(int i) { 
-            ++counts[i]; 
-        }
-        
-        private synchronized long get(int i) { 
-            return counts[i]; 
-        }
-    }
-    private static EventCounts counts = new EventCounts();
-    
-    public static long getFatal() { 
-        return counts.get(FATAL); 
-    }
-    
-    public static long getError() { 
-        return counts.get(ERROR); 
-    }
-    
-    public static long getWarn() { 
-        return counts.get(WARN);  
-    }
-    
-    public static long getInfo() {
-        return counts.get(INFO);
-    }
-    
-    public void append(LoggingEvent event) {
-        Level level = event.getLevel();
-        if (level == Level.INFO) {
-            counts.incr(INFO);
-        }
-        else if (level == Level.WARN) {
-            counts.incr(WARN);
-        }
-        else if (level == Level.ERROR) {
-            counts.incr(ERROR);
-        }
-        else if (level == Level.FATAL) {
-            counts.incr(FATAL);
-        }
-
-    }
-    
-    // Strange: these two methods are abstract in AppenderSkeleton, but not
-    // included in the javadoc (log4j 1.2.13).
-    
-    public void close() {
-    }
-    public boolean requiresLayout() {
-        return false;
-    }
-    
-    
-    
+@Deprecated
+public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
+  static {
+    // The logging system is not started yet.
+    System.err.println("WARNING: "+ EventCounter.class.getName() +
+        " is deprecated. Please use "+
+        org.apache.hadoop.log.metrics.EventCounter.class.getName() +
+        " in all the log4j.properties files.");
+  }
 }

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/EventCounter.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/EventCounter.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/log/metrics/EventCounter.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.log.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.spi.LoggingEvent;
+
+/**
+ * A log4J Appender that simply counts logging events in three levels:
+ * fatal, error and warn. The class name is used in log4j.properties
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class EventCounter extends AppenderSkeleton {
+
+  private static final int FATAL = 0;
+  private static final int ERROR = 1;
+  private static final int WARN = 2;
+  private static final int INFO = 3;
+
+  private static class EventCounts {
+
+    private final long[] counts = {0, 0, 0, 0};
+
+    private synchronized void incr(int i) {
+      ++counts[i];
+    }
+
+    private synchronized long get(int i) {
+      return counts[i];
+    }
+  }
+
+  private static EventCounts counts = new EventCounts();
+
+  @InterfaceAudience.Private
+  public static long getFatal() {
+    return counts.get(FATAL);
+  }
+
+  @InterfaceAudience.Private
+  public static long getError() {
+    return counts.get(ERROR);
+  }
+
+  @InterfaceAudience.Private
+  public static long getWarn() {
+    return counts.get(WARN);
+  }
+
+  @InterfaceAudience.Private
+  public static long getInfo() {
+    return counts.get(INFO);
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+    Level level = event.getLevel();
+    if (level == Level.INFO) {
+      counts.incr(INFO);
+    }
+    else if (level == Level.WARN) {
+      counts.incr(WARN);
+    }
+    else if (level == Level.ERROR) {
+      counts.incr(ERROR);
+    }
+    else if (level == Level.FATAL) {
+      counts.incr(FATAL);
+    }
+
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ContextFactory.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ContextFactory.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ContextFactory.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,213 @@
+/*
+ * ContextFactory.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.NullContext;
+
+/**
+ * Factory class for creating MetricsContext objects.  To obtain an instance
+ * of this class, use the static <code>getFactory()</code> method.
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class ContextFactory {
+    
+  private static final String PROPERTIES_FILE = 
+    "/hadoop-metrics.properties";
+  private static final String CONTEXT_CLASS_SUFFIX =
+    ".class";
+  private static final String DEFAULT_CONTEXT_CLASSNAME =
+    "org.apache.hadoop.metrics.spi.NullContext";
+    
+  private static ContextFactory theFactory = null;
+    
+  private Map<String,Object> attributeMap = new HashMap<String,Object>();
+  private Map<String,MetricsContext> contextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  // Used only when contexts, or the ContextFactory itself, cannot be
+  // created.
+  private static Map<String,MetricsContext> nullContextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  /** Creates a new instance of ContextFactory */
+  protected ContextFactory() {
+  }
+    
+  /**
+   * Returns the value of the named attribute, or null if there is no 
+   * attribute of that name.
+   *
+   * @param attributeName the attribute name
+   * @return the attribute value
+   */
+  public Object getAttribute(String attributeName) {
+    return attributeMap.get(attributeName);
+  }
+    
+  /**
+   * Returns the names of all the factory's attributes.
+   * 
+   * @return the attribute names
+   */
+  public String[] getAttributeNames() {
+    String[] result = new String[attributeMap.size()];
+    int i = 0;
+    // for (String attributeName : attributeMap.keySet()) {
+    Iterator it = attributeMap.keySet().iterator();
+    while (it.hasNext()) {
+      result[i++] = (String) it.next();
+    }
+    return result;
+  }
+    
+  /**
+   * Sets the named factory attribute to the specified value, creating it
+   * if it did not already exist.  If the value is null, this is the same as
+   * calling removeAttribute.
+   *
+   * @param attributeName the attribute name
+   * @param value the new attribute value
+   */
+  public void setAttribute(String attributeName, Object value) {
+    attributeMap.put(attributeName, value);
+  }
+
+  /**
+   * Removes the named attribute if it exists.
+   *
+   * @param attributeName the attribute name
+   */
+  public void removeAttribute(String attributeName) {
+    attributeMap.remove(attributeName);
+  }
+    
+  /**
+   * Returns the named MetricsContext instance, constructing it if necessary 
+   * using the factory's current configuration attributes. <p/>
+   * 
+   * When constructing the instance, if the factory property 
+   * <code><i>contextName</i>.class</code> exists, 
+   * its value is taken to be the name of the class to instantiate.  Otherwise,
+   * the default is to create an instance of 
+   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
+   * dummy "no-op" context which will cause all metric data to be discarded.
+   * 
+   * @param contextName the name of the context
+   * @return the named MetricsContext
+   */
+  public synchronized MetricsContext getContext(String refName, String contextName)
+      throws IOException, ClassNotFoundException,
+             InstantiationException, IllegalAccessException {
+    MetricsContext metricsContext = contextMap.get(refName);
+    if (metricsContext == null) {
+      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
+      String className = (String) getAttribute(classNameAttribute);
+      if (className == null) {
+        className = DEFAULT_CONTEXT_CLASSNAME;
+      }
+      Class contextClass = Class.forName(className);
+      metricsContext = (MetricsContext) contextClass.newInstance();
+      metricsContext.init(contextName, this);
+      contextMap.put(contextName, metricsContext);
+    }
+    return metricsContext;
+  }
+
+  public synchronized MetricsContext getContext(String contextName)
+    throws IOException, ClassNotFoundException, InstantiationException,
+           IllegalAccessException {
+    return getContext(contextName, contextName);
+  }
+  
+  /** 
+   * Returns all MetricsContexts built by this factory.
+   */
+  public synchronized Collection<MetricsContext> getAllContexts() {
+    // Make a copy to avoid race conditions with creating new contexts.
+    return new ArrayList<MetricsContext>(contextMap.values());
+  }
+    
+  /**
+   * Returns a "null" context - one which does nothing.
+   */
+  public static synchronized MetricsContext getNullContext(String contextName) {
+    MetricsContext nullContext = nullContextMap.get(contextName);
+    if (nullContext == null) {
+      nullContext = new NullContext();
+      nullContextMap.put(contextName, nullContext);
+    }
+    return nullContext;
+  }
+    
+  /**
+   * Returns the singleton ContextFactory instance, constructing it if 
+   * necessary. <p/>
+   * 
+   * When the instance is constructed, this method checks if the file 
+   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
+   * exists, it must be in the format defined by java.util.Properties, and all 
+   * the properties in the file are set as attributes on the newly created
+   * ContextFactory instance.
+   *
+   * @return the singleton ContextFactory instance
+   */
+  public static synchronized ContextFactory getFactory() throws IOException {
+    if (theFactory == null) {
+      theFactory = new ContextFactory();
+      theFactory.setAttributes();
+    }
+    return theFactory;
+  }
+    
+  private void setAttributes() throws IOException {
+    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
+    if (is != null) {
+      try {
+        Properties properties = new Properties();
+        properties.load(is);
+        //for (Object propertyNameObj : properties.keySet()) {
+        Iterator it = properties.keySet().iterator();
+        while (it.hasNext()) {
+          String propertyName = (String) it.next();
+          String propertyValue = properties.getProperty(propertyName);
+          setAttribute(propertyName, propertyValue);
+        }
+      } finally {
+        is.close();
+      }
+    }
+  }
+    
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsContext.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsContext.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsContext.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,124 @@
+/*
+ * MetricsContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+
+/**
+ * The main interface to the metrics package. 
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface MetricsContext {
+    
+  /**
+   * Default period in seconds at which data is sent to the metrics system.
+   */
+  public static final int DEFAULT_PERIOD = 5;
+
+  /**
+   * Initialize this context.
+   * @param contextName The given name for this context
+   * @param factory The creator of this context
+   */
+  public void init(String contextName, ContextFactory factory);
+
+  /**
+   * Returns the context name.
+   *
+   * @return the context name
+   */
+  public abstract String getContextName();
+    
+  /**
+   * Starts or restarts monitoring, the emitting of metrics records as they are 
+   * updated. 
+   */
+  public abstract void startMonitoring()
+    throws IOException;
+
+  /**
+   * Stops monitoring.  This does not free any data that the implementation
+   * may have buffered for sending at the next timer event. It
+   * is OK to call <code>startMonitoring()</code> again after calling 
+   * this.
+   * @see #close()
+   */
+  public abstract void stopMonitoring();
+    
+  /**
+   * Returns true if monitoring is currently in progress.
+   */
+  public abstract boolean isMonitoring();
+    
+  /**
+   * Stops monitoring and also frees any buffered data, returning this 
+   * object to its initial state.  
+   */
+  public abstract void close();
+    
+  /**
+   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
+   * Throws an exception if the metrics implementation is configured with a fixed
+   * set of record names and <code>recordName</code> is not in that set.
+   *
+   * @param recordName the name of the record
+   * @throws MetricsException if recordName conflicts with configuration data
+   */
+  public abstract MetricsRecord createRecord(String recordName);
+    
+  /**
+   * Registers a callback to be called at regular time intervals, as 
+   * determined by the implementation-class specific configuration.
+   *
+   * @param updater object to be run periodically; it should updated
+   * some metrics records and then return
+   */
+  public abstract void registerUpdater(Updater updater);
+
+  /**
+   * Removes a callback, if it exists.
+   * 
+   * @param updater object to be removed from the callback list
+   */
+  public abstract void unregisterUpdater(Updater updater);
+  
+  /**
+   * Returns the timer period.
+   */
+  public abstract int getPeriod();
+  
+  /**
+   * Retrieves all the records managed by this MetricsContext.
+   * Useful for monitoring systems that are polling-based.
+   * 
+   * @return A non-null map from all record names to the records managed.
+   */
+   Map<String, Collection<OutputRecord>> getAllRecords();
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsException.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsException.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsException.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,49 @@
+/*
+ * MetricsException.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * General-purpose, unchecked metrics exception.
+ * @deprecated in favor of {@link org.apache.hadoop.metrics2.MetricsException}.
+ */
+@Deprecated
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class MetricsException extends RuntimeException {
+    
+  private static final long serialVersionUID = -1643257498540498497L;
+
+  /** Creates a new instance of MetricsException */
+  public MetricsException() {
+  }
+    
+  /** Creates a new instance of MetricsException 
+   *
+   * @param message an error message
+   */
+  public MetricsException(String message) {
+    super(message);
+  }
+    
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsRecord.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsRecord.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsRecord.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsRecord.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,253 @@
+/*
+ * MetricsRecord.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A named and optionally tagged set of records to be sent to the metrics
+ * system. <p/>
+ *
+ * A record name identifies the kind of data to be reported. For example, a
+ * program reporting statistics relating to the disks on a computer might use
+ * a record name "diskStats".<p/>
+ *
+ * A record has zero or more <i>tags</i>. A tag has a name and a value. To
+ * continue the example, the "diskStats" record might use a tag named
+ * "diskName" to identify a particular disk.  Sometimes it is useful to have
+ * more than one tag, so there might also be a "diskType" with value "ide" or
+ * "scsi" or whatever.<p/>
+ *
+ * A record also has zero or more <i>metrics</i>.  These are the named
+ * values that are to be reported to the metrics system.  In the "diskStats"
+ * example, possible metric names would be "diskPercentFull", "diskPercentBusy", 
+ * "kbReadPerSecond", etc.<p/>
+ * 
+ * The general procedure for using a MetricsRecord is to fill in its tag and
+ * metric values, and then call <code>update()</code> to pass the record to the
+ * client library.
+ * Metric data is not immediately sent to the metrics system
+ * each time that <code>update()</code> is called. 
+ * An internal table is maintained, identified by the record name. This
+ * table has columns 
+ * corresponding to the tag and the metric names, and rows 
+ * corresponding to each unique set of tag values. An update
+ * either modifies an existing row in the table, or adds a new row with a set of
+ * tag values that are different from all the other rows.  Note that if there
+ * are no tags, then there can be at most one row in the table. <p/>
+ * 
+ * Once a row is added to the table, its data will be sent to the metrics system 
+ * on every timer period, whether or not it has been updated since the previous
+ * timer period.  If this is inappropriate, for example if metrics were being
+ * reported by some transient object in an application, the <code>remove()</code>
+ * method can be used to remove the row and thus stop the data from being
+ * sent.<p/>
+ *
+ * Note that the <code>update()</code> method is atomic.  This means that it is
+ * safe for different threads to be updating the same metric.  More precisely,
+ * it is OK for different threads to call <code>update()</code> on MetricsRecord instances 
+ * with the same set of tag names and tag values.  Different threads should 
+ * <b>not</b> use the same MetricsRecord instance at the same time.
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface MetricsRecord {
+    
+  /**
+   * Returns the record name. 
+   *
+   * @return the record name
+   */
+  public abstract String getRecordName();
+    
+  /**
+   * Sets the named tag to the specified value.  The tagValue may be null, 
+   * which is treated the same as an empty String.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, String tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, int tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, long tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, short tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, byte tagValue);
+    
+  /**
+   * Removes any tag of the specified name.
+   *
+   * @param tagName name of a tag
+   */
+  public abstract void removeTag(String tagName);
+  
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, int metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, long metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, short metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, byte metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, float metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, int metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, long metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, short metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, byte metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, float metricValue);
+    
+  /**
+   * Updates the table of buffered data which is to be sent periodically.
+   * If the tag values match an existing row, that row is updated; 
+   * otherwise, a new row is added.  As described in the class comment,
+   * this method is atomic with respect to other threads calling
+   * <code>update()</code> on records with the same tag names and values.
+   */
+  public abstract void update();
+    
+  /**
+   * Removes, from the buffered data table, all rows having tags 
+   * that equal the tags that have been set on this record. For example,
+   * if there are no tags on this record, all rows for this record name
+   * would be removed.  Or, if there is a single tag on this record, then
+   * just rows containing a tag with the same name and value would be removed.
+   */
+  public abstract void remove();
+    
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsServlet.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsServlet.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsServlet.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
+import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
+import org.mortbay.util.ajax.JSON;
+import org.mortbay.util.ajax.JSON.Output;
+
+/**
+ * A servlet to print out metrics data.  By default, the servlet returns a 
+ * textual representation (no promises are made for parseability), and
+ * users can use "?format=json" for parseable output.
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class MetricsServlet extends HttpServlet {
+  
+  // NOTE(review): extends HttpServlet (which is Serializable) but declares
+  // no serialVersionUID, so a compiler-generated default id is used.
+
+  /**
+   * A helper class to hold a TagMap and MetricMap.
+   */
+  static class TagsMetricsPair implements JSON.Convertible {
+    final TagMap tagMap;
+    final MetricMap metricMap;
+    
+    public TagsMetricsPair(TagMap tagMap, MetricMap metricMap) {
+      this.tagMap = tagMap;
+      this.metricMap = metricMap;
+    }
+
+    /** Deserialization from JSON is not supported. */
+    @SuppressWarnings("unchecked")
+    public void fromJSON(Map map) {
+      throw new UnsupportedOperationException();
+    }
+
+    /** Converts to JSON by providing an array of [tags, metrics]. */
+    public void toJSON(Output out) {
+      out.add(new Object[] { tagMap, metricMap });
+    }
+  }
+  
+  /**
+   * Collects all metric data, and returns a map:
+   *   contextName -> recordName -> [ (tag->tagValue), (metric->metricValue) ].
+   * The values are either String or Number.  The final value is implemented
+   * as a list of TagsMetricsPair.
+   */
+   Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
+       Collection<MetricsContext> contexts) throws IOException {
+    Map<String, Map<String, List<TagsMetricsPair>>> map = 
+      new TreeMap<String, Map<String, List<TagsMetricsPair>>>();
+
+    for (MetricsContext context : contexts) {
+      Map<String, List<TagsMetricsPair>> records = 
+        new TreeMap<String, List<TagsMetricsPair>>();
+      map.put(context.getContextName(), records);
+    
+      for (Map.Entry<String, Collection<OutputRecord>> r : 
+          context.getAllRecords().entrySet()) {
+        List<TagsMetricsPair> metricsAndTags = 
+          new ArrayList<TagsMetricsPair>();
+        records.put(r.getKey(), metricsAndTags);
+        for (OutputRecord outputRecord : r.getValue()) {
+          // Take copies so the pairs are isolated from later updates to the
+          // live output record.
+          TagMap tagMap = outputRecord.getTagsCopy();
+          MetricMap metricMap = outputRecord.getMetricsCopy();
+          metricsAndTags.add(new TagsMetricsPair(tagMap, metricMap));
+        }
+      }
+    }
+    return map;
+  }
+  
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+
+    // Do the authorization
+    if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
+        response)) {
+      return;
+    }
+
+    // NOTE(review): wraps the raw output stream without an explicit charset,
+    // so the platform default encoding is used — confirm this is intended.
+    PrintWriter out = new PrintWriter(response.getOutputStream());
+    String format = request.getParameter("format");
+    Collection<MetricsContext> allContexts = 
+      ContextFactory.getFactory().getAllContexts();
+    if ("json".equals(format)) {
+      // Uses Jetty's built-in JSON support to convert the map into JSON.
+      out.print(new JSON().toJSON(makeMap(allContexts)));
+    } else {
+      printMap(out, makeMap(allContexts));
+    }
+    out.close();
+  }
+  
+  /**
+   * Prints metrics data in a multi-line text form.
+   */
+  void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> map) {
+    for (Map.Entry<String, Map<String, List<TagsMetricsPair>>> context : map.entrySet()) {
+      out.println(context.getKey());
+      for (Map.Entry<String, List<TagsMetricsPair>> record : context.getValue().entrySet()) {
+        indent(out, 1);
+        out.println(record.getKey());
+        for (TagsMetricsPair pair : record.getValue()) {
+          indent(out, 2);
+          // Prints tag values in the form "{key=value,key=value}:"
+          out.print("{");
+          boolean first = true;
+          for (Map.Entry<String, Object> tagValue : pair.tagMap.entrySet()) {
+            if (first) {
+              first = false;
+            } else {
+              out.print(",");
+            }
+            out.print(tagValue.getKey());
+            out.print("=");
+            out.print(tagValue.getValue().toString());
+          }
+          out.println("}:");
+          
+          // Now print metric values, one per line
+          for (Map.Entry<String, Number> metricValue : 
+              pair.metricMap.entrySet()) {
+            indent(out, 3);
+            out.print(metricValue.getKey());
+            out.print("=");
+            out.println(metricValue.getValue().toString());
+          }
+        }
+      }
+    }    
+  }
+  
+  /** Writes two spaces per indentation level. */
+  private void indent(PrintWriter out, int indent) {
+    for (int i = 0; i < indent; ++i) {
+      out.append("  ");
+    }
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsUtil.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsUtil.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/MetricsUtil.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Utility class to simplify creation and reporting of hadoop metrics.
+ *
+ * For examples of usage, see NameNodeMetrics.
+ * @see org.apache.hadoop.metrics.MetricsRecord
+ * @see org.apache.hadoop.metrics.MetricsContext
+ * @see org.apache.hadoop.metrics.ContextFactory
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class MetricsUtil {
+    
+  public static final Log LOG =
+    LogFactory.getLog(MetricsUtil.class);
+
+  /**
+   * Don't allow creation of a new instance of Metrics
+   */
+  private MetricsUtil() {}
+    
+  /**
+   * Utility method to return the named context, using the context name as
+   * the factory reference name as well.
+   *
+   * @see #getContext(String, String)
+   */
+  public static MetricsContext getContext(String contextName) {
+    return getContext(contextName, contextName);
+  }
+
+  /**
+   * Utility method to return the named context.
+   * If the desired context cannot be created for any reason, the exception
+   * is logged, and a null context is returned.
+   */
+  public static MetricsContext getContext(String refName, String contextName) {
+    MetricsContext metricsContext;
+    try {
+      metricsContext =
+        ContextFactory.getFactory().getContext(refName, contextName);
+      if (!metricsContext.isMonitoring()) {
+        // Start the context's periodic reporting on first use.
+        metricsContext.startMonitoring();
+      }
+    } catch (Exception ex) {
+      LOG.error("Unable to create metrics context " + contextName, ex);
+      metricsContext = ContextFactory.getNullContext(contextName);
+    }
+    return metricsContext;
+  }
+
+  /**
+   * Utility method to create and return new metrics record instance within the
+   * given context. This record is tagged with the host name.
+   *
+   * @param context the context
+   * @param recordName name of the record
+   * @return newly created metrics record
+   */
+  public static MetricsRecord createRecord(MetricsContext context, 
+                                           String recordName) 
+  {
+    MetricsRecord metricsRecord = context.createRecord(recordName);
+    metricsRecord.setTag("hostName", getHostName());
+    return metricsRecord;        
+  }
+    
+  /**
+   * Returns the host name.  If the host name is unobtainable, logs the
+   * exception and returns "unknown".
+   */
+  private static String getHostName() {
+    String hostName = null;
+    try {
+      hostName = InetAddress.getLocalHost().getHostName();
+    } 
+    catch (UnknownHostException ex) {
+      LOG.info("Unable to obtain hostName", ex);
+      hostName = "unknown";
+    }
+    return hostName;
+  }
+
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/Updater.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/Updater.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/Updater.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/Updater.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,40 @@
+/*
+ * Updater.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Call-back interface.  See <code>MetricsContext.registerUpdater()</code>.
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface Updater {
+    
+  /**
+   * Timer-based call-back from the metric library. 
+   *
+   * @param context the metrics context invoking this updater
+   */
+  public abstract void doUpdates(MetricsContext context);
+
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/FileContext.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/FileContext.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/FileContext.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,154 @@
+/*
+ * FileContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics.file;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.ContextFactory;
+import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+
+/**
+ * Metrics context for writing metrics to a file.<p/>
+ *
+ * This class is configured by setting ContextFactory attributes which in turn
+ * are usually configured through a properties file.  All the attributes are
+ * prefixed by the contextName. For example, the properties file might contain:
+ * <pre>
+ * myContextName.fileName=/tmp/metrics.log
+ * myContextName.period=5
+ * </pre>
+ * @deprecated use {@link org.apache.hadoop.metrics2.sink.FileSink} instead.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+@Deprecated
+public class FileContext extends AbstractMetricsContext {
+    
+  /* Configuration attribute names */
+  @InterfaceAudience.Private
+  protected static final String FILE_NAME_PROPERTY = "fileName";
+  @InterfaceAudience.Private
+  protected static final String PERIOD_PROPERTY = "period";
+    
+  private File file = null;              // file for metrics to be written to
+  private PrintWriter writer = null;
+    
+  /** Creates a new instance of FileContext */
+  @InterfaceAudience.Private
+  public FileContext() {}
+    
+  /**
+   * Initializes the context from factory attributes: reads the optional
+   * "fileName" attribute and parses the "period" attribute.
+   */
+  @InterfaceAudience.Private
+  public void init(String contextName, ContextFactory factory) {
+    super.init(contextName, factory);
+        
+    String fileName = getAttribute(FILE_NAME_PROPERTY);
+    if (fileName != null) {
+      file = new File(fileName);
+    }
+        
+    parseAndSetPeriod(PERIOD_PROPERTY);
+  }
+
+  /**
+   * Returns the configured file name, or null.
+   */
+  @InterfaceAudience.Private
+  public String getFileName() {
+    if (file == null) {
+      return null;
+    } else {
+      return file.getName();
+    }
+  }
+    
+  /**
+   * Starts or restarts monitoring, by opening in append-mode, the
+   * file specified by the <code>fileName</code> attribute,
+   * if specified. Otherwise the data will be written to standard
+   * output.
+   */
+  @InterfaceAudience.Private
+  public void startMonitoring()
+    throws IOException 
+  {
+    if (file == null) {
+      writer = new PrintWriter(new BufferedOutputStream(System.out));
+    } else {
+      writer = new PrintWriter(new FileWriter(file, true));
+    }
+    super.startMonitoring();
+  }
+    
+  /**
+   * Stops monitoring, closing the file.
+   * @see #close()
+   */
+  @InterfaceAudience.Private
+  public void stopMonitoring() {
+    super.stopMonitoring();
+        
+    if (writer != null) {
+      writer.close();
+      writer = null;
+    }
+  }
+    
+  /**
+   * Emits a metrics record to a file.  Assumes <code>writer</code> has been
+   * set up by a prior call to <code>startMonitoring()</code>.
+   */
+  @InterfaceAudience.Private
+  public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
+    writer.print(contextName);
+    writer.print(".");
+    writer.print(recordName);
+    // The first name is preceded by ": "; subsequent ones by ", ".
+    String separator = ": ";
+    for (String tagName : outRec.getTagNames()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(tagName);
+      writer.print("=");
+      writer.print(outRec.getTag(tagName));
+    }
+    for (String metricName : outRec.getMetricNames()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(metricName);
+      writer.print("=");
+      writer.print(outRec.getMetric(metricName));
+    }
+    writer.println();
+  }
+    
+  /**
+   * Flushes the output writer, forcing updates to disk.
+   */
+  @InterfaceAudience.Private
+  public void flush() {
+    writer.flush();
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/package.html
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/package.html?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/package.html (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/file/package.html Tue Mar 15 16:27:19 2011
@@ -0,0 +1,43 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Implementation of the metrics package that writes the metrics to a file.
+Programmers should not normally need to use this package directly. Instead
+they should use org.apache.hadoop.metrics.
+
+<p/>
+These are the implementation specific factory attributes 
+(See ContextFactory.getFactory()):
+
+<dl>
+    <dt><i>contextName</i>.fileName</dt>
+    <dd>The path of the file to which metrics in context <i>contextName</i>
+    are to be appended.  If this attribute is not specified, the metrics
+    are written to standard output by default.</dd>
+    
+    <dt><i>contextName</i>.period</dt>
+    <dd>The period in seconds on which the metric data is written to the
+    file.</dd>
+    
+</dl>
+
+
+</body>
+</html>

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,240 @@
+/*
+ * GangliaContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics.ganglia;
+
+import java.io.IOException;
+import java.net.DatagramPacket;
+import java.net.DatagramSocket;
+import java.net.SocketAddress;
+import java.net.SocketException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.ContextFactory;
+import org.apache.hadoop.metrics.MetricsException;
+import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+import org.apache.hadoop.metrics.spi.Util;
+
+/**
+ * Context for sending metrics to Ganglia.
+ * 
+ * @deprecated in favor of <code>org.apache.hadoop.metrics2</code> usage.
+ */
+@Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class GangliaContext extends AbstractMetricsContext {
+
+  // Factory attribute names (looked up with the context-name prefix).
+  private static final String PERIOD_PROPERTY = "period";
+  private static final String SERVERS_PROPERTY = "servers";
+  private static final String UNITS_PROPERTY = "units";
+  private static final String SLOPE_PROPERTY = "slope";
+  private static final String TMAX_PROPERTY = "tmax";
+  private static final String DMAX_PROPERTY = "dmax";
+
+  private static final String DEFAULT_UNITS = "";
+  private static final String DEFAULT_SLOPE = "both";
+  private static final int DEFAULT_TMAX = 60;
+  private static final int DEFAULT_DMAX = 0;
+  private static final int DEFAULT_PORT = 8649;
+  private static final int BUFFER_SIZE = 1500;       // as per libgmond.c
+
+  private final Log LOG = LogFactory.getLog(this.getClass());
+
+  /**
+   * Maps Java value classes to Ganglia wire-format type names.
+   * NOTE(review): Long is deliberately left mapped to "float" (not
+   * "double") to preserve the historical wire behavior, even though
+   * large long values lose precision.
+   */
+  private static final Map<Class<?>,String> typeTable =
+      new HashMap<Class<?>,String>(5);
+
+  static {
+    typeTable.put(String.class, "string");
+    typeTable.put(Byte.class, "int8");
+    typeTable.put(Short.class, "int16");
+    typeTable.put(Integer.class, "int32");
+    typeTable.put(Long.class, "float");
+    typeTable.put(Float.class, "float");
+  }
+
+  // XDR encoding scratch buffer and current write position, reused for
+  // every outgoing datagram.  There is no bounds check, so one encoded
+  // metric must fit in BUFFER_SIZE bytes.
+  protected byte[] buffer = new byte[BUFFER_SIZE];
+  protected int offset;
+
+  // Destinations parsed from the "servers" attribute.
+  protected List<? extends SocketAddress> metricsServers;
+
+  // Per-metric configuration tables, keyed by "recordName.metricName".
+  private Map<String,String> unitsTable;
+  private Map<String,String> slopeTable;
+  private Map<String,String> tmaxTable;
+  private Map<String,String> dmaxTable;
+
+  protected DatagramSocket datagramSocket;
+
+  /** Creates a new instance of GangliaContext */
+  @InterfaceAudience.Private
+  public GangliaContext() {
+  }
+
+  /**
+   * Reads the period, server list and per-metric attribute tables from
+   * the factory configuration, and opens the UDP socket used to emit
+   * metrics.
+   *
+   * @param contextName name of this metrics context
+   * @param factory the factory holding the configuration attributes
+   */
+  @InterfaceAudience.Private
+  public void init(String contextName, ContextFactory factory) {
+    super.init(contextName, factory);
+    parseAndSetPeriod(PERIOD_PROPERTY);
+
+    metricsServers = 
+      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT); 
+
+    unitsTable = getAttributeTable(UNITS_PROPERTY);
+    slopeTable = getAttributeTable(SLOPE_PROPERTY);
+    tmaxTable  = getAttributeTable(TMAX_PROPERTY);
+    dmaxTable  = getAttributeTable(DMAX_PROPERTY);
+
+    try {
+      datagramSocket = new DatagramSocket();
+    } catch (SocketException se) {
+      // Log through the normal channel instead of dumping a raw stack
+      // trace to stderr.  The context keeps initializing; a later send
+      // will fail if the socket could not be created.
+      LOG.error("Could not create datagram socket", se);
+    }
+  }
+
+  /**
+   * Emits every metric of the given output record under the
+   * fully-qualified name "contextName.recordName.metricName", so names
+   * are unambiguous at the Ganglia level.  Metrics whose value class is
+   * not in {@link #typeTable} are skipped with a warning.
+   */
+  @InterfaceAudience.Private
+  public void emitRecord(String contextName, String recordName,
+    OutputRecord outRec) 
+  throws IOException {
+    // Build the shared "contextName.recordName." prefix once.
+    StringBuilder sb = new StringBuilder();
+    sb.append(contextName);
+    sb.append('.');
+    sb.append(recordName);
+    sb.append('.');
+    int sbBaseLen = sb.length();
+
+    // emit each metric in turn
+    for (String metricName : outRec.getMetricNames()) {
+      Object metric = outRec.getMetric(metricName);
+      String type = typeTable.get(metric.getClass());
+      if (type != null) {
+        sb.append(metricName);
+        emitMetric(sb.toString(), type, metric.toString());
+        sb.setLength(sbBaseLen);  // truncate back to the shared prefix
+      } else {
+        LOG.warn("Unknown metrics type: " + metric.getClass());
+      }
+    }
+  }
+
+  /**
+   * XDR-encodes one metric (Ganglia 3.0 wire format) and sends the
+   * datagram to every configured server.
+   *
+   * @param name fully-qualified metric name
+   * @param type Ganglia type string, e.g. "int32"
+   * @param value metric value rendered as a string
+   */
+  protected void emitMetric(String name, String type,  String value) 
+  throws IOException {
+    String units = getUnits(name);
+    int slope = getSlope(name);
+    int tmax = getTmax(name);
+    int dmax = getDmax(name);
+
+    offset = 0;
+    xdr_int(0);             // metric_user_defined
+    xdr_string(type);
+    xdr_string(name);
+    xdr_string(value);
+    xdr_string(units);
+    xdr_int(slope);
+    xdr_int(tmax);
+    xdr_int(dmax);
+
+    for (SocketAddress socketAddress : metricsServers) {
+      DatagramPacket packet = 
+        new DatagramPacket(buffer, offset, socketAddress);
+      datagramSocket.send(packet);
+    }
+  }
+
+  /** Returns the configured units for the metric, or "" by default. */
+  protected String getUnits(String metricName) {
+    String result = unitsTable.get(metricName);
+    if (result == null) {
+      result = DEFAULT_UNITS;
+    }
+    return result;
+  }
+
+  /** Returns the slope code: 0 for "zero", otherwise 3 ("both"). */
+  protected int getSlope(String metricName) {
+    String slopeString = slopeTable.get(metricName);
+    if (slopeString == null) {
+      slopeString = DEFAULT_SLOPE; 
+    }
+    return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
+  }
+
+  /** Returns the configured tmax for the metric, or DEFAULT_TMAX. */
+  protected int getTmax(String metricName) {
+    if (tmaxTable == null) {
+      return DEFAULT_TMAX;
+    }
+    String tmaxString = tmaxTable.get(metricName);
+    if (tmaxString == null) {
+      return DEFAULT_TMAX;
+    }
+    else {
+      return Integer.parseInt(tmaxString);
+    }
+  }
+
+  /** Returns the configured dmax for the metric, or DEFAULT_DMAX. */
+  protected int getDmax(String metricName) {
+    // Guard against a missing table, mirroring getTmax(); previously a
+    // null dmaxTable caused a NullPointerException here.
+    if (dmaxTable == null) {
+      return DEFAULT_DMAX;
+    }
+    String dmaxString = dmaxTable.get(metricName);
+    if (dmaxString == null) {
+      return DEFAULT_DMAX;
+    }
+    else {
+      return Integer.parseInt(dmaxString);
+    }
+  }
+
+  /**
+   * Puts a string into the buffer by first writing the size of the string
+   * as an int, followed by the bytes of the string, padded if necessary to
+   * a multiple of 4.
+   * NOTE(review): encodes with the platform default charset; kept as-is
+   * for wire compatibility with existing deployments.
+   */
+  protected void xdr_string(String s) {
+    byte[] bytes = s.getBytes();
+    int len = bytes.length;
+    xdr_int(len);
+    System.arraycopy(bytes, 0, buffer, offset, len);
+    offset += len;
+    pad();
+  }
+
+  /**
+   * Pads the buffer with zero bytes up to the nearest multiple of 4.
+   */
+  private void pad() {
+    int newOffset = ((offset + 3) / 4) * 4;
+    while (offset < newOffset) {
+      buffer[offset++] = 0;
+    }
+  }
+
+  /**
+   * Puts an integer into the buffer as 4 bytes, big-endian.
+   */
+  protected void xdr_int(int i) {
+    buffer[offset++] = (byte)((i >> 24) & 0xff);
+    buffer[offset++] = (byte)((i >> 16) & 0xff);
+    buffer[offset++] = (byte)((i >> 8) & 0xff);
+    buffer[offset++] = (byte)(i & 0xff);
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext31.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext31.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext31.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,144 @@
+/*
+ * GangliaContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics.ganglia;
+
+import java.io.IOException;
+import java.net.DatagramPacket;
+import java.net.SocketAddress;
+import java.net.UnknownHostException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics.ContextFactory;
+import org.apache.hadoop.net.DNS;
+
+/**
+ * Context for sending metrics to Ganglia version 3.1.x.
+ * 
+ * 3.1.x has a slightly different wire format compared to 3.0.x.
+ */
+public class GangliaContext31 extends GangliaContext {
+
+  // Hostname reported in each metric packet; resolved in init().
+  String hostName = "UNKNOWN.example.com";
+
+  // NOTE(review): the logger category names "util" although this class
+  // lives in org.apache.hadoop.metrics.ganglia; kept as-is so existing
+  // log4j configurations that reference it keep working.
+  private static final Log LOG = 
+    LogFactory.getLog("org.apache.hadoop.util.GangliaContext31");
+
+  /**
+   * Initializes the parent context, then determines the hostname to
+   * report: "slave.host.name" if configured, otherwise a DNS lookup via
+   * the datanode interface/nameserver settings.
+   */
+  public void init(String contextName, ContextFactory factory) {
+    super.init(contextName, factory);
+
+    LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");
+
+    // Take the hostname from the DNS class.
+    Configuration conf = new Configuration();
+
+    if (conf.get("slave.host.name") != null) {
+      hostName = conf.get("slave.host.name");
+    } else {
+      try {
+        hostName = DNS.getDefaultHost(
+          conf.get("dfs.datanode.dns.interface","default"),
+          conf.get("dfs.datanode.dns.nameserver","default"));
+      } catch (UnknownHostException uhe) {
+        LOG.error(uhe);
+        hostName = "UNKNOWN.example.com";
+      }
+    }
+  }
+
+  /**
+   * Emits one metric in the Ganglia 3.1 wire format: a metadata message
+   * followed by a value message, each sent to every configured server.
+   * A null name, value or type is skipped with a warning instead of
+   * being sent malformed.
+   */
+  protected void emitMetric(String name, String type,  String value) 
+    throws IOException
+  {
+    if (name == null) {
+      LOG.warn("Metric was emitted with no name.");
+      return;
+    } else if (value == null) {
+      LOG.warn("Metric name " + name +" was emitted with a null value.");
+      return;
+    } else if (type == null) {
+      LOG.warn("Metric name " + name + ", value " + value + " has no type.");
+      return;
+    }
+
+    LOG.debug("Emitting metric " + name + ", type " + type + ", value " + 
+      value + " from hostname" + hostName);
+
+    String units = getUnits(name);
+    if (units == null) {
+      LOG.warn("Metric name " + name + ", value " + value
+        + " had 'null' units");
+      units = "";
+    }
+    int slope = getSlope(name);
+    int tmax = getTmax(name);
+    int dmax = getDmax(name);
+    offset = 0;
+
+    // The Ganglia group is everything before the last '.' of the metric
+    // name.  Names built by emitRecord always contain a dot, but guard
+    // against a dotless name rather than throwing
+    // StringIndexOutOfBoundsException from substring(0, -1).
+    int lastDot = name.lastIndexOf('.');
+    String groupName = lastDot < 0 ? "" : name.substring(0, lastDot);
+
+    // The following XDR recipe was done through a careful reading of
+    // gm_protocol.x in Ganglia 3.1 and carefully examining the output of
+    // the gmetric utility with strace.
+
+    // First we send out a metadata message
+    xdr_int(128);         // metric_id = metadata_msg
+    xdr_string(hostName); // hostname
+    xdr_string(name);     // metric name
+    xdr_int(0);           // spoof = False
+    xdr_string(type);     // metric type
+    xdr_string(name);     // metric name
+    xdr_string(units);    // units
+    xdr_int(slope);       // slope
+    xdr_int(tmax);        // tmax, the maximum time between metrics
+    xdr_int(dmax);        // dmax, the time after which the metric expires
+
+    xdr_int(1);             /*Num of the entries in extra_value field for 
+                              Ganglia 3.1.x*/
+    xdr_string("GROUP");    /*Group attribute*/
+    xdr_string(groupName);  /*Group value*/
+
+    for (SocketAddress socketAddress : metricsServers) {
+      DatagramPacket packet =
+        new DatagramPacket(buffer, offset, socketAddress);
+      datagramSocket.send(packet);
+    }
+
+    // Now we send out a message with the actual value.
+    // Technically, we only need to send out the metadata message once for
+    // each metric, but I don't want to have to record which metrics we did and
+    // did not send.
+    offset = 0;
+    xdr_int(133);         // we are sending a string value
+    xdr_string(hostName); // hostName
+    xdr_string(name);     // metric name
+    xdr_int(0);           // spoof = False
+    xdr_string("%s");     // format field
+    xdr_string(value);    // metric value
+
+    for (SocketAddress socketAddress : metricsServers) {
+      DatagramPacket packet = 
+        new DatagramPacket(buffer, offset, socketAddress);
+      datagramSocket.send(packet);
+    }
+  }
+
+}

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/package.html
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/package.html?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/package.html (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/ganglia/package.html Tue Mar 15 16:27:19 2011
@@ -0,0 +1,74 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+<!--
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+-->
+
+Implementation of the metrics package that sends metric data to 
+<a href="http://ganglia.sourceforge.net/">Ganglia</a>.
+Programmers should not normally need to use this package directly. Instead
+they should use the org.apache.hadoop.metrics API.
+
+<p/>
+These are the implementation specific factory attributes 
+(See ContextFactory.getFactory()):
+
+<dl>
+    <dt><i>contextName</i>.servers</dt>
+    <dd>Space and/or comma separated sequence of servers to which UDP
+    messages should be sent.</dd>
+    
+    <dt><i>contextName</i>.period</dt>
+    <dd>The period, in seconds, at which the metric data is sent to the
+    server(s).</dd>
+    
+    <dt><i>contextName</i>.units.<i>recordName</i>.<i>metricName</i></dt>
+    <dd>The units for the specified metric in the specified record.</dd>
+    
+    <dt><i>contextName</i>.slope.<i>recordName</i>.<i>metricName</i></dt>
+    <dd>The slope for the specified metric in the specified record.</dd>
+    
+    <dt><i>contextName</i>.tmax.<i>recordName</i>.<i>metricName</i></dt>
+    <dd>The tmax for the specified metric in the specified record.</dd>
+    
+    <dt><i>contextName</i>.dmax.<i>recordName</i>.<i>metricName</i></dt>
+    <dd>The dmax for the specified metric in the specified record.</dd>
+    
+</dl>
+
+
+</body>
+</html>

Added: hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/EventCounter.java?rev=1081840&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/EventCounter.java (added)
+++ hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/metrics/jvm/EventCounter.java Tue Mar 15 16:27:19 2011
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics.jvm;
+
+/**
+ * A log4J Appender that simply counts logging events in three levels:
+ * fatal, error and warn.
+ * @deprecated use {@link org.apache.hadoop.log.metrics.EventCounter} instead
+ */
+@Deprecated
+public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
+
+  static {
+    // The logging system is not started yet.
+    System.err.println("WARNING: "+ EventCounter.class.getName() +
+        " is deprecated. Please use "+
+        org.apache.hadoop.log.metrics.EventCounter.class.getName() +
+        " in all the log4j.properties files.");
+  }
+}



Mime
View raw message