hadoop-common-commits mailing list archives

From sur...@apache.org
Subject svn commit: r1401071 [2/4] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/ hado...
Date Mon, 22 Oct 2012 20:43:25 GMT
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Mon Oct 22 20:43:16 2012
@@ -24,6 +24,7 @@ import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -1391,11 +1392,11 @@ public abstract class FileSystem extends
   }
 
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
-      @Override
-      public boolean accept(Path file) {
-        return true;
-      }     
-    };
+    @Override
+    public boolean accept(Path file) {
+      return true;
+    }
+  };
     
   /**
    * List the statuses of the files/directories in the given path if the path is
@@ -1559,136 +1560,128 @@ public abstract class FileSystem extends
   }
   
   /**
-   * Return an array of FileStatus objects whose path names match pathPattern
-   * and is accepted by the user-supplied path filter. Results are sorted by
-   * their path names.
-   * Return null if pathPattern has no glob and the path does not exist.
-   * Return an empty array if pathPattern has a glob and no path matches it. 
-   * 
-   * @param pathPattern
-   *          a regular expression specifying the path pattern
-   * @param filter
-   *          a user-supplied path filter
-   * @return an array of FileStatus objects
+   * Return an array of FileStatus objects whose path names match
+   * {@code pathPattern} and is accepted by the user-supplied path filter.
+   * Results are sorted by their path names.
+   * 
+   * @param pathPattern a regular expression specifying the path pattern
+   * @param filter a user-supplied path filter
+   * @return null if {@code pathPattern} has no glob and the path does not exist
+   *         an empty array if {@code pathPattern} has a glob and no path
+   *         matches it else an array of {@link FileStatus} objects matching the
+   *         pattern
    * @throws IOException if any I/O error occurs when fetching file status
    */
   public FileStatus[] globStatus(Path pathPattern, PathFilter filter)
       throws IOException {
     String filename = pathPattern.toUri().getPath();
+    List<FileStatus> allMatches = null;
+    
     List<String> filePatterns = GlobExpander.expand(filename);
-    if (filePatterns.size() == 1) {
-      return globStatusInternal(pathPattern, filter);
-    } else {
-      List<FileStatus> results = new ArrayList<FileStatus>();
-      for (String filePattern : filePatterns) {
-        FileStatus[] files = globStatusInternal(new Path(filePattern), filter);
-        for (FileStatus file : files) {
-          results.add(file);
+    for (String filePattern : filePatterns) {
+      Path path = new Path(filePattern.isEmpty() ? Path.CUR_DIR : filePattern);
+      List<FileStatus> matches = globStatusInternal(path, filter);
+      if (matches != null) {
+        if (allMatches == null) {
+          allMatches = matches;
+        } else {
+          allMatches.addAll(matches);
         }
       }
-      return results.toArray(new FileStatus[results.size()]);
     }
+    
+    FileStatus[] results = null;
+    if (allMatches != null) {
+      results = allMatches.toArray(new FileStatus[allMatches.size()]);
+    } else if (filePatterns.size() > 1) {
+      // no matches with multiple expansions is a non-matching glob 
+      results = new FileStatus[0];
+    }
+    return results;
   }
 
-  private FileStatus[] globStatusInternal(Path pathPattern, PathFilter filter)
-      throws IOException {
-    Path[] parents = new Path[1];
+  // sort gripes because FileStatus Comparable isn't parameterized...
+  @SuppressWarnings("unchecked") 
+  private List<FileStatus> globStatusInternal(Path pathPattern,
+      PathFilter filter) throws IOException {
+    boolean patternHasGlob = false;       // pathPattern has any globs
+    List<FileStatus> matches = new ArrayList<FileStatus>();
+
+    // determine starting point
     int level = 0;
-    String filename = pathPattern.toUri().getPath();
+    String baseDir = Path.CUR_DIR;
+    if (pathPattern.isAbsolute()) {
+      level = 1; // need to skip empty item at beginning of split list
+      baseDir = Path.SEPARATOR;
+    }
     
-    // path has only zero component
-    if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) {
-      return getFileStatus(new Path[]{pathPattern});
+    // parse components and determine if it's a glob
+    String[] components = null;
+    GlobFilter[] filters = null;
+    String filename = pathPattern.toUri().getPath();
+    if (!filename.isEmpty() && !Path.SEPARATOR.equals(filename)) {
+      components = filename.split(Path.SEPARATOR);
+      filters = new GlobFilter[components.length];
+      for (int i=level; i < components.length; i++) {
+        filters[i] = new GlobFilter(components[i]);
+        patternHasGlob |= filters[i].hasPattern();
+      }
+      if (!patternHasGlob) {
+        baseDir = unquotePathComponent(filename);
+        components = null; // short through to filter check
+      }
     }
-
-    // path has at least one component
-    String[] components = filename.split(Path.SEPARATOR);
-    // get the first component
-    if (pathPattern.isAbsolute()) {
-      parents[0] = new Path(Path.SEPARATOR);
-      level = 1;
-    } else {
-      parents[0] = new Path(Path.CUR_DIR);
+    
+    // seed the parent directory path, return if it doesn't exist
+    try {
+      matches.add(getFileStatus(new Path(baseDir)));
+    } catch (FileNotFoundException e) {
+      return patternHasGlob ? matches : null;
     }
-
-    // glob the paths that match the parent path, i.e., [0, components.length-1]
-    boolean[] hasGlob = new boolean[]{false};
-    Path[] parentPaths = globPathsLevel(parents, components, level, hasGlob);
-    FileStatus[] results;
-    if (parentPaths == null || parentPaths.length == 0) {
-      results = null;
-    } else {
-      // Now work on the last component of the path
-      GlobFilter fp = new GlobFilter(components[components.length - 1], filter);
-      if (fp.hasPattern()) { // last component has a pattern
-        // list parent directories and then glob the results
-        try {
-          results = listStatus(parentPaths, fp);
-        } catch (FileNotFoundException e) {
-          results = null;
-        }
-        hasGlob[0] = true;
-      } else { // last component does not have a pattern
-        // remove the quoting of metachars in a non-regexp expansion
-        String name = unquotePathComponent(components[components.length - 1]);
-        // get all the path names
-        ArrayList<Path> filteredPaths = new ArrayList<Path>(parentPaths.length);
-        for (int i = 0; i < parentPaths.length; i++) {
-          parentPaths[i] = new Path(parentPaths[i], name);
-          if (fp.accept(parentPaths[i])) {
-            filteredPaths.add(parentPaths[i]);
+    
+    // skip if there are no components other than the basedir
+    if (components != null) {
+      // iterate through each path component
+      for (int i=level; (i < components.length) && !matches.isEmpty(); i++) {
+        List<FileStatus> children = new ArrayList<FileStatus>();
+        for (FileStatus match : matches) {
+          // don't look for children in a file matched by a glob
+          if (!match.isDirectory()) {
+            continue;
+          }
+          try {
+            if (filters[i].hasPattern()) {
+              // get all children matching the filter
+              FileStatus[] statuses = listStatus(match.getPath(), filters[i]);
+              children.addAll(Arrays.asList(statuses));
+            } else {
+              // the component does not have a pattern
+              String component = unquotePathComponent(components[i]);
+              Path child = new Path(match.getPath(), component);
+              children.add(getFileStatus(child));
+            }
+          } catch (FileNotFoundException e) {
+            // don't care
           }
         }
-        // get all their statuses
-        results = getFileStatus(
-            filteredPaths.toArray(new Path[filteredPaths.size()]));
+        matches = children;
       }
     }
-
-    // Decide if the pathPattern contains a glob or not
-    if (results == null) {
-      if (hasGlob[0]) {
-        results = new FileStatus[0];
-      }
-    } else {
-      if (results.length == 0 ) {
-        if (!hasGlob[0]) {
-          results = null;
+    // remove anything that didn't match the filter
+    if (!matches.isEmpty()) {
+      Iterator<FileStatus> iter = matches.iterator();
+      while (iter.hasNext()) {
+        if (!filter.accept(iter.next().getPath())) {
+          iter.remove();
         }
-      } else {
-        Arrays.sort(results);
       }
     }
-    return results;
-  }
-
-  /*
-   * For a path of N components, return a list of paths that match the
-   * components [<code>level</code>, <code>N-1</code>].
-   */
-  private Path[] globPathsLevel(Path[] parents, String[] filePattern,
-      int level, boolean[] hasGlob) throws IOException {
-    if (level == filePattern.length - 1)
-      return parents;
-    if (parents == null || parents.length == 0) {
-      return null;
-    }
-    GlobFilter fp = new GlobFilter(filePattern[level]);
-    if (fp.hasPattern()) {
-      try {
-        parents = FileUtil.stat2Paths(listStatus(parents, fp));
-      } catch (FileNotFoundException e) {
-        parents = null;
-      }
-      hasGlob[0] = true;
-    } else { // the component does not have a pattern
-      // remove the quoting of metachars in a non-regexp expansion
-      String name = unquotePathComponent(filePattern[level]);
-      for (int i = 0; i < parents.length; i++) {
-        parents[i] = new Path(parents[i], name);
-      }
+    // no final paths, if there were any globs return empty list
+    if (matches.isEmpty()) {
+      return patternHasGlob ? matches : null;
     }
-    return globPathsLevel(parents, filePattern, level + 1, hasGlob);
+    Collections.sort(matches);
+    return matches;
   }
 
   /**
@@ -2165,30 +2158,6 @@ public abstract class FileSystem extends
   }
 
   /**
-   * Return a list of file status objects that corresponds to the list of paths
-   * excluding those non-existent paths.
-   * 
-   * @param paths
-   *          the list of paths we want information from
-   * @return a list of FileStatus objects
-   * @throws IOException
-   *           see specific implementation
-   */
-  private FileStatus[] getFileStatus(Path[] paths) throws IOException {
-    if (paths == null) {
-      return null;
-    }
-    ArrayList<FileStatus> results = new ArrayList<FileStatus>(paths.length);
-    for (int i = 0; i < paths.length; i++) {
-      try {
-        results.add(getFileStatus(paths[i]));
-      } catch (FileNotFoundException e) { // do nothing
-      }
-    }
-    return results.toArray(new FileStatus[results.size()]);
-  }
-  
-  /**
    * Returns a status object describing the use and capacity of the
    * file system. If the file system has multiple partitions, the
    * use and capacity of the root partition is reflected.
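
For reference, the reworked globStatus keeps the contract documented in the Javadoc above: null when the pattern has no glob and the literal path is absent, an empty array when a glob matches nothing, otherwise a sorted array of matches. A minimal caller sketch (the path pattern and class name here are illustrative, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class GlobStatusExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // null: no glob in the pattern and the literal path does not exist
        // empty array: the pattern has a glob but nothing matches it
        FileStatus[] matches = fs.globStatus(new Path("/data/logs/2012-*/part-*"));
        if (matches != null) {
          for (FileStatus status : matches) {
            System.out.println(status.getPath());
          }
        }
      }
    }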

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java Mon Oct 22 20:43:16 2012
@@ -39,12 +39,26 @@ class GlobExpander {
   }
   
   /**
-   * Expand globs in the given <code>filePattern</code> into a collection of 
-   * file patterns so that in the expanded set no file pattern has a
-   * slash character ("/") in a curly bracket pair.
+   * Expand globs in the given <code>filePattern</code> into a collection of
+   * file patterns so that in the expanded set no file pattern has a slash
+   * character ("/") in a curly bracket pair.
+   * <p>
+   * Some examples of how the filePattern is expanded:<br>
+   * <pre>
+   * <b>
+   * filePattern         - Expanded file pattern </b>
+   * {a/b}               - a/b
+   * /}{a/b}             - /}a/b
+   * p{a/b,c/d}s         - pa/bs, pc/ds
+   * {a/b,c/d,{e,f}}     - a/b, c/d, {e,f}
+   * {a/b,c/d}{e,f}      - a/b{e,f}, c/d{e,f}
+   * {a,b}/{b,{c/d,e/f}} - {a,b}/b, {a,b}/c/d, {a,b}/e/f
+   * {a,b}/{c/\d}        - {a,b}/c/d
+   * </pre>
+   * 
    * @param filePattern
    * @return expanded file patterns
-   * @throws IOException 
+   * @throws IOException
    */
   public static List<String> expand(String filePattern) throws IOException {
     List<String> fullyExpanded = new ArrayList<String>();
@@ -65,7 +79,7 @@ class GlobExpander {
   /**
    * Expand the leftmost outer curly bracket pair containing a
    * slash character ("/") in <code>filePattern</code>.
-   * @param filePattern
+   * @param filePatternWithOffset
    * @return expanded file patterns
    * @throws IOException 
    */
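
GlobExpander is package-private, so the sketch below assumes it is run from inside the org.apache.hadoop.fs package (for example from a test); it simply prints one of the expansions shown in the table above.

    package org.apache.hadoop.fs;

    import java.util.List;

    public class GlobExpanderDemo {
      public static void main(String[] args) throws Exception {
        // Per the table above, "{a/b,c/d}{e,f}" expands to "a/b{e,f}" and "c/d{e,f}"
        List<String> expanded = GlobExpander.expand("{a/b,c/d}{e,f}");
        for (String pattern : expanded) {
          System.out.println(pattern);
        }
      }
    }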

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java Mon Oct 22 20:43:16 2012
@@ -387,7 +387,7 @@ public class BuiltInGzipDecompressor imp
       copyBytesToLocal(n);       // modifies userBufLen, etc.
       if (localBufOff >= 4) {    // should be strictly ==
         long inputSize = readUIntLE(localBuf, 0);
-        if (inputSize != (inflater.getBytesWritten() & 0xffffffff)) {
+        if (inputSize != (inflater.getBytesWritten() & 0xffffffffL)) {
           throw new IOException(
             "stored gzip size doesn't match decompressed size");
         }
@@ -571,7 +571,7 @@ public class BuiltInGzipDecompressor imp
     return ((((long)(b[off+3] & 0xff) << 24) |
              ((long)(b[off+2] & 0xff) << 16) |
              ((long)(b[off+1] & 0xff) <<  8) |
-             ((long)(b[off]   & 0xff)      )) & 0xffffffff);
+             ((long)(b[off]   & 0xff)      )) & 0xffffffffL);
   }
 
 }
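
Both hunks above fix the same Java pitfall: 0xffffffff is an int literal equal to -1, and promoting it to long sign-extends it to all ones, so masking with it never clears the high 32 bits. The long literal 0xffffffffL keeps only the low 32 bits, which is what the 32-bit ISIZE field in the gzip trailer requires once more than 4 GiB has been decompressed. A standalone illustration (the byte count is hypothetical):

    public class MaskLiteralDemo {
      public static void main(String[] args) {
        long bytesWritten = 0x100001234L; // just over 4 GiB decompressed (hypothetical)
        // int literal: sign-extends to 0xffffffffffffffffL, so the mask is a no-op
        System.out.println(bytesWritten & 0xffffffff);   // 4294971956
        // long literal: keeps only the low 32 bits, matching gzip's ISIZE field
        System.out.println(bytesWritten & 0xffffffffL);  // 4660
      }
    }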

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Mon Oct 22 20:43:16 2012
@@ -225,7 +225,6 @@ public class Client {
     private IpcConnectionContextProto connectionContext;   // connection context
     private final ConnectionId remoteId;                // connection id
     private AuthMethod authMethod; // authentication method
-    private boolean useSasl;
     private Token<? extends TokenIdentifier> token;
     private SaslRpcClient saslRpcClient;
     
@@ -270,8 +269,7 @@ public class Client {
 
       UserGroupInformation ticket = remoteId.getTicket();
       Class<?> protocol = remoteId.getProtocol();
-      this.useSasl = UserGroupInformation.isSecurityEnabled();
-      if (useSasl && protocol != null) {
+      if (protocol != null) {
         TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf);
         if (tokenInfo != null) {
           TokenSelector<? extends TokenIdentifier> tokenSelector = null;
@@ -296,12 +294,12 @@ public class Client {
         }
       }
       
-      if (!useSasl) {
-        authMethod = AuthMethod.SIMPLE;
-      } else if (token != null) {
+      if (token != null) {
         authMethod = AuthMethod.DIGEST;
-      } else {
+      } else if (UserGroupInformation.isSecurityEnabled()) {
         authMethod = AuthMethod.KERBEROS;
+      } else {
+        authMethod = AuthMethod.SIMPLE;
       }
       
       connectionContext = ProtoUtil.makeIpcConnectionContext(
@@ -576,14 +574,12 @@ public class Client {
           InputStream inStream = NetUtils.getInputStream(socket);
           OutputStream outStream = NetUtils.getOutputStream(socket);
           writeConnectionHeader(outStream);
-          if (useSasl) {
+          if (authMethod != AuthMethod.SIMPLE) {
             final InputStream in2 = inStream;
             final OutputStream out2 = outStream;
             UserGroupInformation ticket = remoteId.getTicket();
-            if (authMethod == AuthMethod.KERBEROS) {
-              if (ticket.getRealUser() != null) {
-                ticket = ticket.getRealUser();
-              }
+            if (ticket.getRealUser() != null) {
+              ticket = ticket.getRealUser();
             }
             boolean continueSasl = false;
             try {
@@ -614,7 +610,6 @@ public class Client {
                   connectionContext.getProtocol(), 
                   ProtoUtil.getUgi(connectionContext.getUserInfo()),
                   authMethod);
-              useSasl = false;
             }
           }
         
@@ -1174,7 +1169,7 @@ public class Client {
                   call.error);
         }
       } else {
-        return call.rpcResponse;
+        return call.getRpcResult();
       }
     }
   }
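
The useSasl flag is gone; the auth method is now chosen directly, and later checks test authMethod != AuthMethod.SIMPLE instead. The selection order introduced here, as a self-contained sketch (the simplified enum and boolean inputs are stand-ins for the real Token and UserGroupInformation checks):

    public class AuthMethodChoice {
      enum AuthMethod { SIMPLE, DIGEST, KERBEROS }

      static AuthMethod choose(boolean hasToken, boolean securityEnabled) {
        if (hasToken) {
          return AuthMethod.DIGEST;     // a token is now used even if security is off
        } else if (securityEnabled) {
          return AuthMethod.KERBEROS;   // secure cluster, no token
        } else {
          return AuthMethod.SIMPLE;     // insecure cluster, no SASL handshake
        }
      }

      public static void main(String[] args) {
        System.out.println(choose(true, true));   // DIGEST
        System.out.println(choose(false, true));  // KERBEROS
        System.out.println(choose(false, false)); // SIMPLE
      }
    }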

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java Mon Oct 22 20:43:16 2012
@@ -113,12 +113,17 @@ import org.codehaus.jackson.JsonGenerato
  *  All other objects will be converted to a string and output as such.
  *  
  *  The bean's name and modelerType will be returned for all beans.
+ *
+ *  Optional parameter "callback" can be used to request a JSONP response.
+ *  
  */
 public class JMXJsonServlet extends HttpServlet {
   private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
 
   private static final long serialVersionUID = 1L;
 
+  private static final String CALLBACK_PARAM = "callback";
+
   /**
    * MBean server.
    */
@@ -154,11 +159,22 @@ public class JMXJsonServlet extends Http
         return;
       }
       JsonGenerator jg = null;
+      String jsonpcb = null;
+      PrintWriter writer = null;
       try {
-        response.setContentType("application/json; charset=utf8");
+        writer = response.getWriter();
+ 
+        // "callback" parameter implies JSONP output
+        jsonpcb = request.getParameter(CALLBACK_PARAM);
+        if (jsonpcb != null) {
+          response.setContentType("application/javascript; charset=utf8");
+          writer.write(jsonpcb + "(");
+        } else {
+          response.setContentType("application/json; charset=utf8");
+        }
 
-        PrintWriter writer = response.getWriter();
         jg = jsonFactory.createJsonGenerator(writer);
+        jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
         jg.useDefaultPrettyPrinter();
         jg.writeStartObject();
 
@@ -188,6 +204,12 @@ public class JMXJsonServlet extends Http
         if (jg != null) {
           jg.close();
         }
+        if (jsonpcb != null) {
+           writer.write(");");
+        }
+        if (writer != null) {
+          writer.close();
+        }
       }
     } catch (IOException e) {
       LOG.error("Caught an exception while processing JMX request", e);
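
With this change a "callback" query parameter switches the servlet to JSONP: the content type becomes application/javascript and the JSON document is wrapped in a call to the named function. For a hypothetical request (host, port, and bean contents are illustrative)

    http://namenode.example.com:50070/jmx?callback=renderJmx

the response body has the shape

    renderJmx({
      "beans" : [ ... ]
    });

so it can be loaded with a <script> tag from another origin. Without the parameter, the plain application/json behaviour is unchanged.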

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java Mon Oct 22 20:43:16 2012
@@ -1,211 +1,211 @@
-/*
- * ContextFactory.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.spi.NullContext;
-
-/**
- * Factory class for creating MetricsContext objects.  To obtain an instance
- * of this class, use the static <code>getFactory()</code> method.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class ContextFactory {
-    
-  private static final String PROPERTIES_FILE = 
-    "/hadoop-metrics.properties";
-  private static final String CONTEXT_CLASS_SUFFIX =
-    ".class";
-  private static final String DEFAULT_CONTEXT_CLASSNAME =
-    "org.apache.hadoop.metrics.spi.NullContext";
-    
-  private static ContextFactory theFactory = null;
-    
-  private Map<String,Object> attributeMap = new HashMap<String,Object>();
-  private Map<String,MetricsContext> contextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  // Used only when contexts, or the ContextFactory itself, cannot be
-  // created.
-  private static Map<String,MetricsContext> nullContextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  /** Creates a new instance of ContextFactory */
-  protected ContextFactory() {
-  }
-    
-  /**
-   * Returns the value of the named attribute, or null if there is no 
-   * attribute of that name.
-   *
-   * @param attributeName the attribute name
-   * @return the attribute value
-   */
-  public Object getAttribute(String attributeName) {
-    return attributeMap.get(attributeName);
-  }
-    
-  /**
-   * Returns the names of all the factory's attributes.
-   * 
-   * @return the attribute names
-   */
-  public String[] getAttributeNames() {
-    String[] result = new String[attributeMap.size()];
-    int i = 0;
-    // for (String attributeName : attributeMap.keySet()) {
-    Iterator it = attributeMap.keySet().iterator();
-    while (it.hasNext()) {
-      result[i++] = (String) it.next();
-    }
-    return result;
-  }
-    
-  /**
-   * Sets the named factory attribute to the specified value, creating it
-   * if it did not already exist.  If the value is null, this is the same as
-   * calling removeAttribute.
-   *
-   * @param attributeName the attribute name
-   * @param value the new attribute value
-   */
-  public void setAttribute(String attributeName, Object value) {
-    attributeMap.put(attributeName, value);
-  }
-
-  /**
-   * Removes the named attribute if it exists.
-   *
-   * @param attributeName the attribute name
-   */
-  public void removeAttribute(String attributeName) {
-    attributeMap.remove(attributeName);
-  }
-    
-  /**
-   * Returns the named MetricsContext instance, constructing it if necessary 
-   * using the factory's current configuration attributes. <p/>
-   * 
-   * When constructing the instance, if the factory property 
-   * <i>contextName</i>.class</code> exists, 
-   * its value is taken to be the name of the class to instantiate.  Otherwise,
-   * the default is to create an instance of 
-   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
-   * dummy "no-op" context which will cause all metric data to be discarded.
-   * 
-   * @param contextName the name of the context
-   * @return the named MetricsContext
-   */
-  public synchronized MetricsContext getContext(String refName, String contextName)
-      throws IOException, ClassNotFoundException,
-             InstantiationException, IllegalAccessException {
-    MetricsContext metricsContext = contextMap.get(refName);
-    if (metricsContext == null) {
-      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
-      String className = (String) getAttribute(classNameAttribute);
-      if (className == null) {
-        className = DEFAULT_CONTEXT_CLASSNAME;
-      }
-      Class contextClass = Class.forName(className);
-      metricsContext = (MetricsContext) contextClass.newInstance();
-      metricsContext.init(contextName, this);
-      contextMap.put(contextName, metricsContext);
-    }
-    return metricsContext;
-  }
-
-  public synchronized MetricsContext getContext(String contextName)
-    throws IOException, ClassNotFoundException, InstantiationException,
-           IllegalAccessException {
-    return getContext(contextName, contextName);
-  }
-  
-  /** 
-   * Returns all MetricsContexts built by this factory.
-   */
-  public synchronized Collection<MetricsContext> getAllContexts() {
-    // Make a copy to avoid race conditions with creating new contexts.
-    return new ArrayList<MetricsContext>(contextMap.values());
-  }
-    
-  /**
-   * Returns a "null" context - one which does nothing.
-   */
-  public static synchronized MetricsContext getNullContext(String contextName) {
-    MetricsContext nullContext = nullContextMap.get(contextName);
-    if (nullContext == null) {
-      nullContext = new NullContext();
-      nullContextMap.put(contextName, nullContext);
-    }
-    return nullContext;
-  }
-    
-  /**
-   * Returns the singleton ContextFactory instance, constructing it if 
-   * necessary. <p/>
-   * 
-   * When the instance is constructed, this method checks if the file 
-   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
-   * exists, it must be in the format defined by java.util.Properties, and all 
-   * the properties in the file are set as attributes on the newly created
-   * ContextFactory instance.
-   *
-   * @return the singleton ContextFactory instance
-   */
-  public static synchronized ContextFactory getFactory() throws IOException {
-    if (theFactory == null) {
-      theFactory = new ContextFactory();
-      theFactory.setAttributes();
-    }
-    return theFactory;
-  }
-    
-  private void setAttributes() throws IOException {
-    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
-    if (is != null) {
-      try {
-        Properties properties = new Properties();
-        properties.load(is);
-        //for (Object propertyNameObj : properties.keySet()) {
-        Iterator it = properties.keySet().iterator();
-        while (it.hasNext()) {
-          String propertyName = (String) it.next();
-          String propertyValue = properties.getProperty(propertyName);
-          setAttribute(propertyName, propertyValue);
-        }
-      } finally {
-        is.close();
-      }
-    }
-  }
-    
-}
+/*
+ * ContextFactory.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.NullContext;
+
+/**
+ * Factory class for creating MetricsContext objects.  To obtain an instance
+ * of this class, use the static <code>getFactory()</code> method.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class ContextFactory {
+    
+  private static final String PROPERTIES_FILE = 
+    "/hadoop-metrics.properties";
+  private static final String CONTEXT_CLASS_SUFFIX =
+    ".class";
+  private static final String DEFAULT_CONTEXT_CLASSNAME =
+    "org.apache.hadoop.metrics.spi.NullContext";
+    
+  private static ContextFactory theFactory = null;
+    
+  private Map<String,Object> attributeMap = new HashMap<String,Object>();
+  private Map<String,MetricsContext> contextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  // Used only when contexts, or the ContextFactory itself, cannot be
+  // created.
+  private static Map<String,MetricsContext> nullContextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  /** Creates a new instance of ContextFactory */
+  protected ContextFactory() {
+  }
+    
+  /**
+   * Returns the value of the named attribute, or null if there is no 
+   * attribute of that name.
+   *
+   * @param attributeName the attribute name
+   * @return the attribute value
+   */
+  public Object getAttribute(String attributeName) {
+    return attributeMap.get(attributeName);
+  }
+    
+  /**
+   * Returns the names of all the factory's attributes.
+   * 
+   * @return the attribute names
+   */
+  public String[] getAttributeNames() {
+    String[] result = new String[attributeMap.size()];
+    int i = 0;
+    // for (String attributeName : attributeMap.keySet()) {
+    Iterator it = attributeMap.keySet().iterator();
+    while (it.hasNext()) {
+      result[i++] = (String) it.next();
+    }
+    return result;
+  }
+    
+  /**
+   * Sets the named factory attribute to the specified value, creating it
+   * if it did not already exist.  If the value is null, this is the same as
+   * calling removeAttribute.
+   *
+   * @param attributeName the attribute name
+   * @param value the new attribute value
+   */
+  public void setAttribute(String attributeName, Object value) {
+    attributeMap.put(attributeName, value);
+  }
+
+  /**
+   * Removes the named attribute if it exists.
+   *
+   * @param attributeName the attribute name
+   */
+  public void removeAttribute(String attributeName) {
+    attributeMap.remove(attributeName);
+  }
+    
+  /**
+   * Returns the named MetricsContext instance, constructing it if necessary 
+   * using the factory's current configuration attributes. <p/>
+   * 
+   * When constructing the instance, if the factory property 
+   * <i>contextName</i>.class</code> exists, 
+   * its value is taken to be the name of the class to instantiate.  Otherwise,
+   * the default is to create an instance of 
+   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
+   * dummy "no-op" context which will cause all metric data to be discarded.
+   * 
+   * @param contextName the name of the context
+   * @return the named MetricsContext
+   */
+  public synchronized MetricsContext getContext(String refName, String contextName)
+      throws IOException, ClassNotFoundException,
+             InstantiationException, IllegalAccessException {
+    MetricsContext metricsContext = contextMap.get(refName);
+    if (metricsContext == null) {
+      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
+      String className = (String) getAttribute(classNameAttribute);
+      if (className == null) {
+        className = DEFAULT_CONTEXT_CLASSNAME;
+      }
+      Class contextClass = Class.forName(className);
+      metricsContext = (MetricsContext) contextClass.newInstance();
+      metricsContext.init(contextName, this);
+      contextMap.put(contextName, metricsContext);
+    }
+    return metricsContext;
+  }
+
+  public synchronized MetricsContext getContext(String contextName)
+    throws IOException, ClassNotFoundException, InstantiationException,
+           IllegalAccessException {
+    return getContext(contextName, contextName);
+  }
+  
+  /** 
+   * Returns all MetricsContexts built by this factory.
+   */
+  public synchronized Collection<MetricsContext> getAllContexts() {
+    // Make a copy to avoid race conditions with creating new contexts.
+    return new ArrayList<MetricsContext>(contextMap.values());
+  }
+    
+  /**
+   * Returns a "null" context - one which does nothing.
+   */
+  public static synchronized MetricsContext getNullContext(String contextName) {
+    MetricsContext nullContext = nullContextMap.get(contextName);
+    if (nullContext == null) {
+      nullContext = new NullContext();
+      nullContextMap.put(contextName, nullContext);
+    }
+    return nullContext;
+  }
+    
+  /**
+   * Returns the singleton ContextFactory instance, constructing it if 
+   * necessary. <p/>
+   * 
+   * When the instance is constructed, this method checks if the file 
+   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
+   * exists, it must be in the format defined by java.util.Properties, and all 
+   * the properties in the file are set as attributes on the newly created
+   * ContextFactory instance.
+   *
+   * @return the singleton ContextFactory instance
+   */
+  public static synchronized ContextFactory getFactory() throws IOException {
+    if (theFactory == null) {
+      theFactory = new ContextFactory();
+      theFactory.setAttributes();
+    }
+    return theFactory;
+  }
+    
+  private void setAttributes() throws IOException {
+    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
+    if (is != null) {
+      try {
+        Properties properties = new Properties();
+        properties.load(is);
+        //for (Object propertyNameObj : properties.keySet()) {
+        Iterator it = properties.keySet().iterator();
+        while (it.hasNext()) {
+          String propertyName = (String) it.next();
+          String propertyValue = properties.getProperty(propertyName);
+          setAttribute(propertyName, propertyValue);
+        }
+      } finally {
+        is.close();
+      }
+    }
+  }
+    
+}
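
The ContextFactory hunk appears to be a whitespace/line-ending-only rewrite; the removed and re-added text is identical and the API is unchanged. As the Javadoc above describes, getContext falls back to NullContext unless a "<contextName>.class" attribute names another implementation. A hedged usage sketch (the context name "jvm" is illustrative):

    import org.apache.hadoop.metrics.ContextFactory;
    import org.apache.hadoop.metrics.MetricsContext;

    public class ContextFactoryDemo {
      public static void main(String[] args) throws Exception {
        // getFactory() loads /hadoop-metrics.properties from the classpath if present
        ContextFactory factory = ContextFactory.getFactory();
        // Without a "jvm.class" attribute this returns a NullContext that discards all data
        MetricsContext context = factory.getContext("jvm");
        System.out.println(context.getContextName());
      }
    }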

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java Mon Oct 22 20:43:16 2012
@@ -1,122 +1,122 @@
-/*
- * MetricsContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-
-/**
- * The main interface to the metrics package. 
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface MetricsContext {
-    
-  /**
-   * Default period in seconds at which data is sent to the metrics system.
-   */
-  public static final int DEFAULT_PERIOD = 5;
-
-  /**
-   * Initialize this context.
-   * @param contextName The given name for this context
-   * @param factory The creator of this context
-   */
-  public void init(String contextName, ContextFactory factory);
-
-  /**
-   * Returns the context name.
-   *
-   * @return the context name
-   */
-  public abstract String getContextName();
-    
-  /**
-   * Starts or restarts monitoring, the emitting of metrics records as they are 
-   * updated. 
-   */
-  public abstract void startMonitoring()
-    throws IOException;
-
-  /**
-   * Stops monitoring.  This does not free any data that the implementation
-   * may have buffered for sending at the next timer event. It
-   * is OK to call <code>startMonitoring()</code> again after calling 
-   * this.
-   * @see #close()
-   */
-  public abstract void stopMonitoring();
-    
-  /**
-   * Returns true if monitoring is currently in progress.
-   */
-  public abstract boolean isMonitoring();
-    
-  /**
-   * Stops monitoring and also frees any buffered data, returning this 
-   * object to its initial state.  
-   */
-  public abstract void close();
-    
-  /**
-   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
-   * Throws an exception if the metrics implementation is configured with a fixed
-   * set of record names and <code>recordName</code> is not in that set.
-   *
-   * @param recordName the name of the record
-   * @throws MetricsException if recordName conflicts with configuration data
-   */
-  public abstract MetricsRecord createRecord(String recordName);
-    
-  /**
-   * Registers a callback to be called at regular time intervals, as 
-   * determined by the implementation-class specific configuration.
-   *
-   * @param updater object to be run periodically; it should updated
-   * some metrics records and then return
-   */
-  public abstract void registerUpdater(Updater updater);
-
-  /**
-   * Removes a callback, if it exists.
-   * 
-   * @param updater object to be removed from the callback list
-   */
-  public abstract void unregisterUpdater(Updater updater);
-  
-  /**
-   * Returns the timer period.
-   */
-  public abstract int getPeriod();
-  
-  /**
-   * Retrieves all the records managed by this MetricsContext.
-   * Useful for monitoring systems that are polling-based.
-   * 
-   * @return A non-null map from all record names to the records managed.
-   */
-   Map<String, Collection<OutputRecord>> getAllRecords();
-}
+/*
+ * MetricsContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+
+/**
+ * The main interface to the metrics package. 
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface MetricsContext {
+    
+  /**
+   * Default period in seconds at which data is sent to the metrics system.
+   */
+  public static final int DEFAULT_PERIOD = 5;
+
+  /**
+   * Initialize this context.
+   * @param contextName The given name for this context
+   * @param factory The creator of this context
+   */
+  public void init(String contextName, ContextFactory factory);
+
+  /**
+   * Returns the context name.
+   *
+   * @return the context name
+   */
+  public abstract String getContextName();
+    
+  /**
+   * Starts or restarts monitoring, the emitting of metrics records as they are 
+   * updated. 
+   */
+  public abstract void startMonitoring()
+    throws IOException;
+
+  /**
+   * Stops monitoring.  This does not free any data that the implementation
+   * may have buffered for sending at the next timer event. It
+   * is OK to call <code>startMonitoring()</code> again after calling 
+   * this.
+   * @see #close()
+   */
+  public abstract void stopMonitoring();
+    
+  /**
+   * Returns true if monitoring is currently in progress.
+   */
+  public abstract boolean isMonitoring();
+    
+  /**
+   * Stops monitoring and also frees any buffered data, returning this 
+   * object to its initial state.  
+   */
+  public abstract void close();
+    
+  /**
+   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
+   * Throws an exception if the metrics implementation is configured with a fixed
+   * set of record names and <code>recordName</code> is not in that set.
+   *
+   * @param recordName the name of the record
+   * @throws MetricsException if recordName conflicts with configuration data
+   */
+  public abstract MetricsRecord createRecord(String recordName);
+    
+  /**
+   * Registers a callback to be called at regular time intervals, as 
+   * determined by the implementation-class specific configuration.
+   *
+   * @param updater object to be run periodically; it should updated
+   * some metrics records and then return
+   */
+  public abstract void registerUpdater(Updater updater);
+
+  /**
+   * Removes a callback, if it exists.
+   * 
+   * @param updater object to be removed from the callback list
+   */
+  public abstract void unregisterUpdater(Updater updater);
+  
+  /**
+   * Returns the timer period.
+   */
+  public abstract int getPeriod();
+  
+  /**
+   * Retrieves all the records managed by this MetricsContext.
+   * Useful for monitoring systems that are polling-based.
+   * 
+   * @return A non-null map from all record names to the records managed.
+   */
+   Map<String, Collection<OutputRecord>> getAllRecords();
+}
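
MetricsContext.java gets the same apparent whitespace-only rewrite. Tying the interface methods above together, a hedged sketch of the usual lifecycle: obtain a context, create a record, register an Updater, and start monitoring (the context, record, and metric names are illustrative):

    import org.apache.hadoop.metrics.ContextFactory;
    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.Updater;

    public class UpdaterDemo {
      public static void main(String[] args) throws Exception {
        MetricsContext context = ContextFactory.getFactory().getContext("myapp");
        final MetricsRecord record = context.createRecord("requestStats");
        context.registerUpdater(new Updater() {
          @Override
          public void doUpdates(MetricsContext unused) {
            record.incrMetric("requestsHandled", 1); // buffered until the next timer period
            record.update();                         // push the row into the internal table
          }
        });
        context.startMonitoring(); // emit records every getPeriod() seconds
      }
    }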

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java Mon Oct 22 20:43:16 2012
@@ -1,47 +1,47 @@
-/*
- * MetricsException.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * General-purpose, unchecked metrics exception.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class MetricsException extends RuntimeException {
-    
-  private static final long serialVersionUID = -1643257498540498497L;
-
-  /** Creates a new instance of MetricsException */
-  public MetricsException() {
-  }
-    
-  /** Creates a new instance of MetricsException 
-   *
-   * @param message an error message
-   */
-  public MetricsException(String message) {
-    super(message);
-  }
-    
-}
+/*
+ * MetricsException.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * General-purpose, unchecked metrics exception.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class MetricsException extends RuntimeException {
+    
+  private static final long serialVersionUID = -1643257498540498497L;
+
+  /** Creates a new instance of MetricsException */
+  public MetricsException() {
+  }
+    
+  /** Creates a new instance of MetricsException 
+   *
+   * @param message an error message
+   */
+  public MetricsException(String message) {
+    super(message);
+  }
+    
+}

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsRecord.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsRecord.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsRecord.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsRecord.java Mon Oct 22 20:43:16 2012
@@ -1,251 +1,251 @@
-/*
- * MetricsRecord.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * A named and optionally tagged set of records to be sent to the metrics
- * system. <p/>
- *
- * A record name identifies the kind of data to be reported. For example, a
- * program reporting statistics relating to the disks on a computer might use
- * a record name "diskStats".<p/>
- *
- * A record has zero or more <i>tags</i>. A tag has a name and a value. To
- * continue the example, the "diskStats" record might use a tag named
- * "diskName" to identify a particular disk.  Sometimes it is useful to have
- * more than one tag, so there might also be a "diskType" with value "ide" or
- * "scsi" or whatever.<p/>
- *
- * A record also has zero or more <i>metrics</i>.  These are the named
- * values that are to be reported to the metrics system.  In the "diskStats"
- * example, possible metric names would be "diskPercentFull", "diskPercentBusy", 
- * "kbReadPerSecond", etc.<p/>
- * 
- * The general procedure for using a MetricsRecord is to fill in its tag and
- * metric values, and then call <code>update()</code> to pass the record to the
- * client library.
- * Metric data is not immediately sent to the metrics system
- * each time that <code>update()</code> is called. 
- * An internal table is maintained, identified by the record name. This
- * table has columns 
- * corresponding to the tag and the metric names, and rows 
- * corresponding to each unique set of tag values. An update
- * either modifies an existing row in the table, or adds a new row with a set of
- * tag values that are different from all the other rows.  Note that if there
- * are no tags, then there can be at most one row in the table. <p/>
- * 
- * Once a row is added to the table, its data will be sent to the metrics system 
- * on every timer period, whether or not it has been updated since the previous
- * timer period.  If this is inappropriate, for example if metrics were being
- * reported by some transient object in an application, the <code>remove()</code>
- * method can be used to remove the row and thus stop the data from being
- * sent.<p/>
- *
- * Note that the <code>update()</code> method is atomic.  This means that it is
- * safe for different threads to be updating the same metric.  More precisely,
- * it is OK for different threads to call <code>update()</code> on MetricsRecord instances 
- * with the same set of tag names and tag values.  Different threads should 
- * <b>not</b> use the same MetricsRecord instance at the same time.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface MetricsRecord {
-    
-  /**
-   * Returns the record name. 
-   *
-   * @return the record name
-   */
-  public abstract String getRecordName();
-    
-  /**
-   * Sets the named tag to the specified value.  The tagValue may be null, 
-   * which is treated the same as an empty String.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, String tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, int tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, long tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, short tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, byte tagValue);
-    
-  /**
-   * Removes any tag of the specified name.
-   *
-   * @param tagName name of a tag
-   */
-  public abstract void removeTag(String tagName);
-  
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, int metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, long metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, short metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, byte metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, float metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, int metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, long metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, short metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, byte metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, float metricValue);
-    
-  /**
-   * Updates the table of buffered data which is to be sent periodically.
-   * If the tag values match an existing row, that row is updated; 
-   * otherwise, a new row is added.
-   */
-  public abstract void update();
-    
-  /**
-   * Removes, from the buffered data table, all rows having tags 
-   * that equal the tags that have been set on this record. For example,
-   * if there are no tags on this record, all rows for this record name
-   * would be removed.  Or, if there is a single tag on this record, then
-   * just rows containing a tag with the same name and value would be removed.
-   */
-  public abstract void remove();
-    
-}
+/*
+ * MetricsRecord.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A named and optionally tagged set of records to be sent to the metrics
+ * system. <p/>
+ *
+ * A record name identifies the kind of data to be reported. For example, a
+ * program reporting statistics relating to the disks on a computer might use
+ * a record name "diskStats".<p/>
+ *
+ * A record has zero or more <i>tags</i>. A tag has a name and a value. To
+ * continue the example, the "diskStats" record might use a tag named
+ * "diskName" to identify a particular disk.  Sometimes it is useful to have
+ * more than one tag, so there might also be a "diskType" with value "ide" or
+ * "scsi" or whatever.<p/>
+ *
+ * A record also has zero or more <i>metrics</i>.  These are the named
+ * values that are to be reported to the metrics system.  In the "diskStats"
+ * example, possible metric names would be "diskPercentFull", "diskPercentBusy", 
+ * "kbReadPerSecond", etc.<p/>
+ * 
+ * The general procedure for using a MetricsRecord is to fill in its tag and
+ * metric values, and then call <code>update()</code> to pass the record to the
+ * client library.
+ * Metric data is not immediately sent to the metrics system
+ * each time that <code>update()</code> is called. 
+ * An internal table is maintained, identified by the record name. This
+ * table has columns 
+ * corresponding to the tag and the metric names, and rows 
+ * corresponding to each unique set of tag values. An update
+ * either modifies an existing row in the table, or adds a new row with a set of
+ * tag values that are different from all the other rows.  Note that if there
+ * are no tags, then there can be at most one row in the table. <p/>
+ * 
+ * Once a row is added to the table, its data will be sent to the metrics system 
+ * on every timer period, whether or not it has been updated since the previous
+ * timer period.  If this is inappropriate, for example if metrics were being
+ * reported by some transient object in an application, the <code>remove()</code>
+ * method can be used to remove the row and thus stop the data from being
+ * sent.<p/>
+ *
+ * Note that the <code>update()</code> method is atomic.  This means that it is
+ * safe for different threads to be updating the same metric.  More precisely,
+ * it is OK for different threads to call <code>update()</code> on MetricsRecord instances 
+ * with the same set of tag names and tag values.  Different threads should 
+ * <b>not</b> use the same MetricsRecord instance at the same time.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface MetricsRecord {
+    
+  /**
+   * Returns the record name. 
+   *
+   * @return the record name
+   */
+  public abstract String getRecordName();
+    
+  /**
+   * Sets the named tag to the specified value.  The tagValue may be null, 
+   * which is treated the same as an empty String.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, String tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, int tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, long tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, short tagValue);
+    
+  /**
+   * Sets the named tag to the specified value.
+   *
+   * @param tagName name of the tag
+   * @param tagValue new value of the tag
+   * @throws MetricsException if the tagName conflicts with the configuration
+   */
+  public abstract void setTag(String tagName, byte tagValue);
+    
+  /**
+   * Removes any tag of the specified name.
+   *
+   * @param tagName name of a tag
+   */
+  public abstract void removeTag(String tagName);
+  
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, int metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, long metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, short metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, byte metricValue);
+    
+  /**
+   * Sets the named metric to the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue new value of the metric
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void setMetric(String metricName, float metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, int metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, long metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, short metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, byte metricValue);
+    
+  /**
+   * Increments the named metric by the specified value.
+   *
+   * @param metricName name of the metric
+   * @param metricValue incremental value
+   * @throws MetricsException if the metricName or the type of the metricValue 
+   * conflicts with the configuration
+   */
+  public abstract void incrMetric(String metricName, float metricValue);
+    
+  /**
+   * Updates the table of buffered data which is to be sent periodically.
+   * If the tag values match an existing row, that row is updated; 
+   * otherwise, a new row is added.
+   */
+  public abstract void update();
+    
+  /**
+   * Removes, from the buffered data table, all rows having tags 
+   * that equal the tags that have been set on this record. For example,
+   * if there are no tags on this record, all rows for this record name
+   * would be removed.  Or, if there is a single tag on this record, then
+   * just rows containing a tag with the same name and value would be removed.
+   */
+  public abstract void remove();
+    
+}
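
For readers unfamiliar with the (deprecated) metrics v1 API shown in this hunk, the following is a minimal usage sketch of the procedure the MetricsRecord Javadoc describes: fill in tags and metrics, call update(), and later remove(). It is not part of the commit; the context name, record name, tag and metric names are hypothetical, and MetricsUtil.getContext()/createRecord() are assumed to be available from org.apache.hadoop.metrics as elsewhere in this package.

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;

public class DiskStatsReporter {
  public static void reportOnce() {
    // Look up the context configured under this (hypothetical) name.
    MetricsContext context = MetricsUtil.getContext("myContextName");

    // Create a record; the record name identifies the kind of data reported.
    MetricsRecord diskStats = MetricsUtil.createRecord(context, "diskStats");

    // Tags identify the row in the buffered table; metrics are the values.
    diskStats.setTag("diskName", "sda1");
    diskStats.setMetric("diskPercentFull", 42);
    diskStats.incrMetric("kbReadPerSecond", 128);

    // Buffer the row; it is re-sent on every timer period until removed.
    diskStats.update();

    // If the disk (or the reporting object) goes away, stop sending its row.
    diskStats.remove();
  }
}

Calling update() again with the same tag values would modify the same buffered row rather than add a new one, matching the table semantics described in the Javadoc above.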

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/file/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/file/FileContext.java?rev=1401071&r1=1401070&r2=1401071&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/file/FileContext.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/file/FileContext.java Mon Oct 22 20:43:16 2012
@@ -1,159 +1,159 @@
-/*
- * FileContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.file;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-
-/**
- * Metrics context for writing metrics to a file.<p/>
- *
- * This class is configured by setting ContextFactory attributes which in turn
- * are usually configured through a properties file.  All the attributes are
- * prefixed by the contextName. For example, the properties file might contain:
- * <pre>
- * myContextName.fileName=/tmp/metrics.log
- * myContextName.period=5
- * </pre>
- * @see org.apache.hadoop.metrics2.sink.FileSink for metrics 2.0.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-@Deprecated
-public class FileContext extends AbstractMetricsContext {
-    
-  /* Configuration attribute names */
-  @InterfaceAudience.Private
-  protected static final String FILE_NAME_PROPERTY = "fileName";
-  @InterfaceAudience.Private
-  protected static final String PERIOD_PROPERTY = "period";
-    
-  private File file = null;              // file for metrics to be written to
-  private PrintWriter writer = null;
-    
-  /** Creates a new instance of FileContext */
-  @InterfaceAudience.Private
-  public FileContext() {}
-    
-  @Override
-  @InterfaceAudience.Private
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-        
-    String fileName = getAttribute(FILE_NAME_PROPERTY);
-    if (fileName != null) {
-      file = new File(fileName);
-    }
-        
-    parseAndSetPeriod(PERIOD_PROPERTY);
-  }
-
-  /**
-   * Returns the configured file name, or null.
-   */
-  @InterfaceAudience.Private
-  public String getFileName() {
-    if (file == null) {
-      return null;
-    } else {
-      return file.getName();
-    }
-  }
-    
-  /**
-   * Starts or restarts monitoring, by opening in append-mode, the
-   * file specified by the <code>fileName</code> attribute,
-   * if specified. Otherwise the data will be written to standard
-   * output.
-   */
-  @Override
-  @InterfaceAudience.Private
-  public void startMonitoring()
-    throws IOException 
-  {
-    if (file == null) {
-      writer = new PrintWriter(new BufferedOutputStream(System.out));
-    } else {
-      writer = new PrintWriter(new FileWriter(file, true));
-    }
-    super.startMonitoring();
-  }
-    
-  /**
-   * Stops monitoring, closing the file.
-   * @see #close()
-   */
-  @Override
-  @InterfaceAudience.Private
-  public void stopMonitoring() {
-    super.stopMonitoring();
-        
-    if (writer != null) {
-      writer.close();
-      writer = null;
-    }
-  }
-    
-  /**
-   * Emits a metrics record to a file.
-   */
-  @Override
-  @InterfaceAudience.Private
-  public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
-    writer.print(contextName);
-    writer.print(".");
-    writer.print(recordName);
-    String separator = ": ";
-    for (String tagName : outRec.getTagNames()) {
-      writer.print(separator);
-      separator = ", ";
-      writer.print(tagName);
-      writer.print("=");
-      writer.print(outRec.getTag(tagName));
-    }
-    for (String metricName : outRec.getMetricNames()) {
-      writer.print(separator);
-      separator = ", ";
-      writer.print(metricName);
-      writer.print("=");
-      writer.print(outRec.getMetric(metricName));
-    }
-    writer.println();
-  }
-    
-  /**
-   * Flushes the output writer, forcing updates to disk.
-   */
-  @Override
-  @InterfaceAudience.Private
-  public void flush() {
-    writer.flush();
-  }
-}
+/*
+ * FileContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics.file;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.ContextFactory;
+import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+
+/**
+ * Metrics context for writing metrics to a file.<p/>
+ *
+ * This class is configured by setting ContextFactory attributes which in turn
+ * are usually configured through a properties file.  All the attributes are
+ * prefixed by the contextName. For example, the properties file might contain:
+ * <pre>
+ * myContextName.fileName=/tmp/metrics.log
+ * myContextName.period=5
+ * </pre>
+ * @see org.apache.hadoop.metrics2.sink.FileSink for metrics 2.0.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+@Deprecated
+public class FileContext extends AbstractMetricsContext {
+    
+  /* Configuration attribute names */
+  @InterfaceAudience.Private
+  protected static final String FILE_NAME_PROPERTY = "fileName";
+  @InterfaceAudience.Private
+  protected static final String PERIOD_PROPERTY = "period";
+    
+  private File file = null;              // file for metrics to be written to
+  private PrintWriter writer = null;
+    
+  /** Creates a new instance of FileContext */
+  @InterfaceAudience.Private
+  public FileContext() {}
+    
+  @Override
+  @InterfaceAudience.Private
+  public void init(String contextName, ContextFactory factory) {
+    super.init(contextName, factory);
+        
+    String fileName = getAttribute(FILE_NAME_PROPERTY);
+    if (fileName != null) {
+      file = new File(fileName);
+    }
+        
+    parseAndSetPeriod(PERIOD_PROPERTY);
+  }
+
+  /**
+   * Returns the configured file name, or null.
+   */
+  @InterfaceAudience.Private
+  public String getFileName() {
+    if (file == null) {
+      return null;
+    } else {
+      return file.getName();
+    }
+  }
+    
+  /**
+   * Starts or restarts monitoring, by opening in append-mode, the
+   * file specified by the <code>fileName</code> attribute,
+   * if specified. Otherwise the data will be written to standard
+   * output.
+   */
+  @Override
+  @InterfaceAudience.Private
+  public void startMonitoring()
+    throws IOException 
+  {
+    if (file == null) {
+      writer = new PrintWriter(new BufferedOutputStream(System.out));
+    } else {
+      writer = new PrintWriter(new FileWriter(file, true));
+    }
+    super.startMonitoring();
+  }
+    
+  /**
+   * Stops monitoring, closing the file.
+   * @see #close()
+   */
+  @Override
+  @InterfaceAudience.Private
+  public void stopMonitoring() {
+    super.stopMonitoring();
+        
+    if (writer != null) {
+      writer.close();
+      writer = null;
+    }
+  }
+    
+  /**
+   * Emits a metrics record to a file.
+   */
+  @Override
+  @InterfaceAudience.Private
+  public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
+    writer.print(contextName);
+    writer.print(".");
+    writer.print(recordName);
+    String separator = ": ";
+    for (String tagName : outRec.getTagNames()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(tagName);
+      writer.print("=");
+      writer.print(outRec.getTag(tagName));
+    }
+    for (String metricName : outRec.getMetricNames()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(metricName);
+      writer.print("=");
+      writer.print(outRec.getMetric(metricName));
+    }
+    writer.println();
+  }
+    
+  /**
+   * Flushes the output writer, forcing updates to disk.
+   */
+  @Override
+  @InterfaceAudience.Private
+  public void flush() {
+    writer.flush();
+  }
+}
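
As an illustration of the attribute-based configuration described in the FileContext Javadoc, here is a small sketch that sets the same attributes programmatically through ContextFactory rather than a properties file. It is not part of the commit; the context name, file name, and period are hypothetical, and ContextFactory.getFactory(), setAttribute(), MetricsUtil.getContext(), and MetricsContext.startMonitoring() are assumed to behave as in the metrics v1 API.

import java.io.IOException;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsUtil;

public class FileContextSetup {
  public static MetricsContext configure() throws IOException {
    // Attributes are prefixed by the context name, exactly as they would be
    // in a properties file (myContextName.fileName=..., myContextName.period=...).
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("myContextName.class",
        "org.apache.hadoop.metrics.file.FileContext");
    factory.setAttribute("myContextName.fileName", "/tmp/metrics.log");
    factory.setAttribute("myContextName.period", "5");

    // The context is instantiated from the *.class attribute and initialized
    // with the attributes above; startMonitoring() opens the file in append
    // mode, or falls back to standard output if no fileName is configured.
    MetricsContext context = MetricsUtil.getContext("myContextName");
    context.startMonitoring();
    return context;
  }
}

Per emitRecord() above, each emitted line then has the form contextName.recordName: tag=value, ..., metric=value, ... with a ": " before the first field and ", " between the rest.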


