hadoop-common-commits mailing list archives

From sunc...@apache.org
Subject [18/50] [abbrv] hadoop git commit: HADOOP-15821. Move YARN Registry to Hadoop Registry. Contributed by Íñigo Goiri
Date Wed, 24 Oct 2018 05:52:38 GMT
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java
new file mode 100644
index 0000000..b8e9ba1
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.binding;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
+import org.apache.hadoop.registry.client.impl.zk.RegistryInternalConstants;
+import org.apache.hadoop.registry.server.dns.BaseServiceRecordProcessor;
+import org.apache.zookeeper.common.PathUtils;
+
+import java.net.IDN;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Basic operations on paths: manipulating them and creating and validating
+ * path elements.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class RegistryPathUtils {
+
+  /**
+   * Compiled down pattern to validate single entries in the path
+   */
+  private static final Pattern PATH_ENTRY_VALIDATION_PATTERN =
+      Pattern.compile(RegistryInternalConstants.VALID_PATH_ENTRY_PATTERN);
+
+  private static final Pattern USER_NAME =
+      Pattern.compile("/users/([a-z][a-z0-9-.]*)");
+
+  /**
+   * Validate ZK path with the path itself included in
+   * the exception text
+   * @param path path to validate
+   * @return the path parameter
+   * @throws InvalidPathnameException if the pathname is invalid.
+   */
+  public static String validateZKPath(String path) throws
+      InvalidPathnameException {
+    try {
+      PathUtils.validatePath(path);
+
+    } catch (IllegalArgumentException e) {
+      throw new InvalidPathnameException(path,
+          "Invalid Path \"" + path + "\" : " + e, e);
+    }
+    return path;
+  }
+
+  /**
+   * Validate ZK path as valid for a DNS hostname.
+   * @param path path to validate
+   * @return the path parameter
+   * @throws InvalidPathnameException if the pathname is invalid.
+   */
+  public static String validateElementsAsDNS(String path) throws
+      InvalidPathnameException {
+    List<String> splitpath = split(path);
+    for (String fragment : splitpath) {
+      if (!PATH_ENTRY_VALIDATION_PATTERN.matcher(fragment).matches()) {
+        throw new InvalidPathnameException(path,
+            "Invalid Path element \"" + fragment + "\"");
+      }
+    }
+    return path;
+  }
+
+  /**
+   * Create a full path from the registry root and the supplied subdir.
+   * @param base registry base path
+   * @param path path of operation
+   * @return an absolute path
+   * @throws InvalidPathnameException if the path is invalid
+   */
+  public static String createFullPath(String base, String path) throws
+      InvalidPathnameException {
+    Preconditions.checkArgument(path != null, "null path");
+    Preconditions.checkArgument(base != null, "null base");
+    return validateZKPath(join(base, path));
+  }
+
+  /**
+   * Join two paths, guaranteeing that there will be exactly
+   * one separator between the two, and exactly one at the front
+   * of the path. There will be no trailing "/" except for the special
+   * case that this is the root path.
+   * @param base base path
+   * @param path second path to add
+   * @return a combined path.
+   */
+  public static String join(String base, String path) {
+    Preconditions.checkArgument(path != null, "null path");
+    Preconditions.checkArgument(base != null, "null base");
+    StringBuilder fullpath = new StringBuilder();
+
+    if (!base.startsWith("/")) {
+      fullpath.append('/');
+    }
+    fullpath.append(base);
+
+    // guarantee a trailing /
+    if (!fullpath.toString().endsWith("/")) {
+      fullpath.append("/");
+    }
+    // strip off any at the beginning
+    if (path.startsWith("/")) {
+      // path starts with "/", so append all other characters, if present
+      if (path.length() > 1) {
+        fullpath.append(path.substring(1));
+      }
+    } else {
+      fullpath.append(path);
+    }
+
+    //here there may be a trailing "/"
+    String finalpath = fullpath.toString();
+    if (finalpath.endsWith("/") && !"/".equals(finalpath)) {
+      finalpath = finalpath.substring(0, finalpath.length() - 1);
+
+    }
+    return finalpath;
+  }
+
+  /**
+   * split a path into elements, stripping empty elements
+   * @param path the path
+   * @return the split path
+   */
+  public static List<String> split(String path) {
+    //
+    String[] pathelements = path.split("/");
+    List<String> dirs = new ArrayList<String>(pathelements.length);
+    for (String pathelement : pathelements) {
+      if (!pathelement.isEmpty()) {
+        dirs.add(pathelement);
+      }
+    }
+    return dirs;
+  }
+
+  /**
+   * Get the last entry in a path; for an empty path
+   * returns "". The split logic is that of
+   * {@link #split(String)}
+   * @param path path of operation
+   * @return the last path entry or "" if none.
+   */
+  public static String lastPathEntry(String path) {
+    List<String> splits = split(path);
+    if (splits.isEmpty()) {
+      // empty path. Return ""
+      return "";
+    } else {
+      return splits.get(splits.size() - 1);
+    }
+  }
+
+  /**
+   * Get the parent of a path
+   * @param path path to look at
+   * @return the parent path
+   * @throws PathNotFoundException if the path was at root.
+   */
+  public static String parentOf(String path) throws PathNotFoundException {
+    List<String> elements = split(path);
+
+    int size = elements.size();
+    if (size == 0) {
+      throw new PathNotFoundException("No parent of " + path);
+    }
+    if (size == 1) {
+      return "/";
+    }
+    elements.remove(size - 1);
+    StringBuilder parent = new StringBuilder(path.length());
+    for (String element : elements) {
+      parent.append("/");
+      parent.append(element);
+    }
+    return parent.toString();
+  }
+
+  /**
+   * Perform any formatting for the registry needed to convert
+   * non-simple-DNS elements
+   * @param element element to encode
+   * @return an encoded string
+   */
+  public static String encodeForRegistry(String element) {
+    return IDN.toASCII(element);
+  }
+
+  /**
+   * Perform whatever transforms are needed to get a YARN ID into
+   * a DNS-compatible name
+   * @param yarnId ID as string of YARN application, instance or container
+   * @return a string suitable for use in registry paths.
+   */
+  public static String encodeYarnID(String yarnId) {
+    return yarnId.replace("container", "ctr").replace("_", "-");
+  }
+
+  /**
+   * Return the username found in the ZK path.
+   *
+   * @param recPath the ZK recPath.
+   * @return the user name.
+   */
+  public static String getUsername(String recPath) {
+    String user = "anonymous";
+    Matcher matcher = USER_NAME.matcher(recPath);
+    if (matcher.find()) {
+      user = matcher.group(1);
+    }
+    return user;
+  }
+}
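
For illustration, a minimal sketch of how these path helpers compose; the class, user and service names below are hypothetical and not part of this patch:

import org.apache.hadoop.registry.client.binding.RegistryPathUtils;

public class RegistryPathUtilsSketch {
  public static void main(String[] args) throws Exception {
    // join() guarantees exactly one "/" between the segments and one at the front
    String servicePath =
        RegistryPathUtils.join("/users/alice", "services/org-apache-example");
    // -> "/users/alice/services/org-apache-example"

    // split() drops empty elements, so duplicate "/" separators are tolerated
    System.out.println(RegistryPathUtils.split(servicePath));
    // -> [users, alice, services, org-apache-example]

    // lastPathEntry() and parentOf() walk the hierarchy
    System.out.println(RegistryPathUtils.lastPathEntry(servicePath)); // org-apache-example
    System.out.println(RegistryPathUtils.parentOf(servicePath));      // /users/alice/services

    // encodeYarnID() rewrites a YARN container ID into a DNS-friendly form:
    // container_1408631738011_0001_01_000002 -> ctr-1408631738011-0001-01-000002
    System.out.println(
        RegistryPathUtils.encodeYarnID("container_1408631738011_0001_01_000002"));
  }
}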

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
new file mode 100644
index 0000000..05df325
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
@@ -0,0 +1,291 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.binding;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
+import org.apache.hadoop.registry.client.types.Endpoint;
+import org.apache.hadoop.registry.client.types.ProtocolTypes;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.registry.client.types.AddressTypes.*;
+
+/**
+ * Static methods to work with registry types, primarily endpoints and the
+ * list representation of addresses.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RegistryTypeUtils {
+
+  /**
+   * Create a URL endpoint from a list of URIs
+   * @param api implemented API
+   * @param protocolType protocol type
+   * @param uris URIs
+   * @return a new endpoint
+   */
+  public static Endpoint urlEndpoint(String api,
+      String protocolType,
+      URI... uris) {
+    return new Endpoint(api, protocolType, uris);
+  }
+
+  /**
+   * Create a REST endpoint from a list of URIs
+   * @param api implemented API
+   * @param uris URIs
+   * @return a new endpoint
+   */
+  public static Endpoint restEndpoint(String api,
+      URI... uris) {
+    return urlEndpoint(api, ProtocolTypes.PROTOCOL_REST, uris);
+  }
+
+  /**
+   * Create a Web UI endpoint from a list of URIs
+   * @param api implemented API
+   * @param uris URIs
+   * @return a new endpoint
+   */
+  public static Endpoint webEndpoint(String api,
+      URI... uris) {
+    return urlEndpoint(api, ProtocolTypes.PROTOCOL_WEBUI, uris);
+  }
+
+  /**
+   * Create an internet address endpoint from a list of URIs
+   * @param api implemented API
+   * @param protocolType protocol type
+   * @param hostname hostname/FQDN
+   * @param port port
+   * @return a new endpoint
+   */
+
+  public static Endpoint inetAddrEndpoint(String api,
+      String protocolType,
+      String hostname,
+      int port) {
+    Preconditions.checkArgument(api != null, "null API");
+    Preconditions.checkArgument(protocolType != null, "null protocolType");
+    Preconditions.checkArgument(hostname != null, "null hostname");
+    return new Endpoint(api,
+        ADDRESS_HOSTNAME_AND_PORT,
+        protocolType,
+        hostnamePortPair(hostname, port));
+  }
+
+  /**
+   * Create an IPC endpoint
+   * @param api API
+   * @param address the address as a tuple of (hostname, port)
+   * @return the new endpoint
+   */
+  public static Endpoint ipcEndpoint(String api, InetSocketAddress address) {
+    return new Endpoint(api,
+        ADDRESS_HOSTNAME_AND_PORT,
+        ProtocolTypes.PROTOCOL_HADOOP_IPC,
+        address == null ? null : hostnamePortPair(address));
+  }
+
+  /**
+   * Create a single entry map
+   * @param key map entry key
+   * @param val map entry value
+   * @return a 1 entry map.
+   */
+  public static Map<String, String> map(String key, String val) {
+    Map<String, String> map = new HashMap<String, String>(1);
+    map.put(key, val);
+    return map;
+  }
+
+  /**
+   * Create a single-entry address map for a URI.
+   * @param uri value
+   * @return a 1 entry map.
+   */
+  public static Map<String, String> uri(String uri) {
+    return map(ADDRESS_URI, uri);
+  }
+
+  /**
+   * Create a (hostname, port) address pair
+   * @param hostname hostname
+   * @param port port
+   * @return a two-entry map of hostname and port.
+   */
+  public static Map<String, String> hostnamePortPair(String hostname, int port) {
+    Map<String, String> map =
+        map(ADDRESS_HOSTNAME_FIELD, hostname);
+    map.put(ADDRESS_PORT_FIELD, Integer.toString(port));
+    return map;
+  }
+
+  /**
+   * Create a (hostname, port) address pair
+   * @param address socket address whose hostname and port are used for the
+   * generated address.
+   * @return a two-entry map of hostname and port.
+   */
+  public static Map<String, String> hostnamePortPair(InetSocketAddress address) {
+    return hostnamePortPair(address.getHostName(), address.getPort());
+  }
+
+  /**
+   * Require a specific address type on an endpoint
+   * @param required required type
+   * @param epr endpoint
+   * @throws InvalidRecordException if the type is wrong
+   */
+  public static void requireAddressType(String required, Endpoint epr) throws
+      InvalidRecordException {
+    if (!required.equals(epr.addressType)) {
+      throw new InvalidRecordException(
+          epr.toString(),
+          "Address type of " + epr.addressType
+          + " does not match required type of "
+          + required);
+    }
+  }
+
+  /**
+   * Get the URI addresses of an endpoint.
+   * @param epr endpoint
+   * @return the URIs of all entries in the address list; null if the endpoint
+   * itself is null
+   * @throws InvalidRecordException if the type is wrong, there are no addresses
+   * or the payload is ill-formatted
+   */
+  public static List<String> retrieveAddressesUriType(Endpoint epr)
+      throws InvalidRecordException {
+    if (epr == null) {
+      return null;
+    }
+    requireAddressType(ADDRESS_URI, epr);
+    List<Map<String, String>> addresses = epr.addresses;
+    if (addresses.size() < 1) {
+      throw new InvalidRecordException(epr.toString(),
+          "No addresses in endpoint");
+    }
+    List<String> results = new ArrayList<String>(addresses.size());
+    for (Map<String, String> address : addresses) {
+      results.add(getAddressField(address, ADDRESS_URI));
+    }
+    return results;
+  }
+
+  /**
+   * Get a specific field from an address, raising an exception if
+   * the field is not present.
+   * @param address address to query
+   * @param field field to resolve
+   * @return the resolved value. Guaranteed to be non-null.
+   * @throws InvalidRecordException if the field did not resolve
+   */
+  public static String getAddressField(Map<String, String> address,
+      String field) throws InvalidRecordException {
+    String val = address.get(field);
+    if (val == null) {
+      throw new InvalidRecordException("", "Missing address field: " + field);
+    }
+    return val;
+  }
+
+  /**
+   * Get the address URLs. Guaranteed to return at least one address.
+   * @param epr endpoint
+   * @return the address as a URL
+   * @throws InvalidRecordException if the type is wrong, there are no addresses
+   * or the payload is ill-formatted
+   * @throws MalformedURLException address can't be turned into a URL
+   */
+  public static List<URL> retrieveAddressURLs(Endpoint epr)
+      throws InvalidRecordException, MalformedURLException {
+    if (epr == null) {
+      throw new InvalidRecordException("", "Null endpoint");
+    }
+    List<String> addresses = retrieveAddressesUriType(epr);
+    List<URL> results = new ArrayList<URL>(addresses.size());
+    for (String address : addresses) {
+      results.add(new URL(address));
+    }
+    return results;
+  }
+
+  /**
+   * Validate the record by checking for null fields and other invalid
+   * conditions
+   * @param path path for exceptions
+   * @param record record to validate. May be null
+   * @throws InvalidRecordException on invalid entries
+   */
+  public static void validateServiceRecord(String path, ServiceRecord record)
+      throws InvalidRecordException {
+    if (record == null) {
+      throw new InvalidRecordException(path, "Null record");
+    }
+    if (!ServiceRecord.RECORD_TYPE.equals(record.type)) {
+      throw new InvalidRecordException(path,
+          "invalid record type field: \"" + record.type + "\"");
+    }
+
+    if (record.external != null) {
+      for (Endpoint endpoint : record.external) {
+        validateEndpoint(path, endpoint);
+      }
+    }
+    if (record.internal != null) {
+      for (Endpoint endpoint : record.internal) {
+        validateEndpoint(path, endpoint);
+      }
+    }
+  }
+
+  /**
+   * Validate the endpoint by checking for null fields and other invalid
+   * conditions
+   * @param path path for exceptions
+   * @param endpoint endpoint to validate. May be null
+   * @throws InvalidRecordException on invalid entries
+   */
+  public static void validateEndpoint(String path, Endpoint endpoint)
+      throws InvalidRecordException {
+    if (endpoint == null) {
+      throw new InvalidRecordException(path, "Null endpoint");
+    }
+    try {
+      endpoint.validate();
+    } catch (RuntimeException e) {
+      throw new InvalidRecordException(path, e.toString());
+    }
+  }
+
+}
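
A short sketch of how endpoints built with these helpers round-trip; the API names and host/port values are illustrative only:

import java.net.URI;
import java.util.List;

import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.types.Endpoint;
import org.apache.hadoop.registry.client.types.ProtocolTypes;

public class RegistryTypeUtilsSketch {
  public static void main(String[] args) throws Exception {
    // URI-typed endpoint: each address is stored as a single-entry {"uri": ...} map
    Endpoint web = RegistryTypeUtils.webEndpoint(
        "ui", new URI("http://example.org:8088"));
    List<String> uris = RegistryTypeUtils.retrieveAddressesUriType(web);
    System.out.println(uris); // [http://example.org:8088]

    // hostname/port-typed endpoint: addresses are {"host": ..., "port": ...} maps,
    // so retrieveAddressesUriType() would reject it via requireAddressType()
    Endpoint ipc = RegistryTypeUtils.inetAddrEndpoint(
        "rpc", ProtocolTypes.PROTOCOL_HADOOP_IPC, "example.org", 8032);
    System.out.println(ipc.addressType);
  }
}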

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
new file mode 100644
index 0000000..1b839c2
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
@@ -0,0 +1,399 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.binding;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.registry.client.api.RegistryConstants;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
+import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
+import org.apache.hadoop.registry.client.exceptions.NoRecordException;
+import org.apache.hadoop.registry.client.impl.zk.RegistryInternalConstants;
+import org.apache.hadoop.registry.client.types.RegistryPathStatus;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.*;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * Utility methods for working with a registry.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RegistryUtils {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RegistryUtils.class);
+
+  /**
+   * Build the user path; switches to the system path if the user is "".
+   * It also cross-converts the username to ASCII via punycode.
+   * @param username username or ""
+   * @return the path to the user
+   */
+  public static String homePathForUser(String username) {
+    Preconditions.checkArgument(username != null, "null user");
+
+    // catch recursion
+    if (username.startsWith(RegistryConstants.PATH_USERS)) {
+      return username;
+    }
+    if (username.isEmpty()) {
+      return RegistryConstants.PATH_SYSTEM_SERVICES;
+    }
+
+    // convert username to registry name
+    String convertedName = convertUsername(username);
+
+    return RegistryPathUtils.join(RegistryConstants.PATH_USERS,
+        encodeForRegistry(convertedName));
+  }
+
+  /**
+   * Convert the username into the form used for registry
+   * entries: lower-case it, strip off the Kerberos realm if present,
+   * and drop any "/" hostname component.
+   * @param username user
+   * @return the converted username
+   */
+  public static String convertUsername(String username) {
+    String converted =
+        org.apache.hadoop.util.StringUtils.toLowerCase(username);
+    int atSymbol = converted.indexOf('@');
+    if (atSymbol > 0) {
+      converted = converted.substring(0, atSymbol);
+    }
+    int slashSymbol = converted.indexOf('/');
+    if (slashSymbol > 0) {
+      converted = converted.substring(0, slashSymbol);
+    }
+    return converted;
+  }
+
+  /**
+   * Create the path to a service class under a user.
+   * @param user username or ""
+   * @param serviceClass service class
+   * @return a full path
+   */
+  public static String serviceclassPath(String user,
+      String serviceClass) {
+    String services = join(homePathForUser(user),
+        RegistryConstants.PATH_USER_SERVICES);
+    return join(services,
+        serviceClass);
+  }
+
+  /**
+   * Create a path to a service under a user and service class
+   * @param user username or ""
+   * @param serviceClass service class
+   * @param serviceName service name unique for that user and service class
+   * @return a full path
+   */
+  public static String servicePath(String user,
+      String serviceClass,
+      String serviceName) {
+
+    return join(
+        serviceclassPath(user, serviceClass),
+        serviceName);
+  }
+
+  /**
+   * Create a path for listing components under a service
+   * @param user username or ""
+   * @param serviceClass service class
+   * @param serviceName service name unique for that user and service class
+   * @return a full path
+   */
+  public static String componentListPath(String user,
+      String serviceClass, String serviceName) {
+
+    return join(servicePath(user, serviceClass, serviceName),
+        RegistryConstants.SUBPATH_COMPONENTS);
+  }
+
+  /**
+   * Create the path to a service record for a component
+   * @param user username or ""
+   * @param serviceClass service class
+   * @param serviceName service name unique for that user and service class
+   * @param componentName unique name/ID of the component
+   * @return a full path
+   */
+  public static String componentPath(String user,
+      String serviceClass, String serviceName, String componentName) {
+
+    return join(
+        componentListPath(user, serviceClass, serviceName),
+        componentName);
+  }
+
+  /**
+   * List service records directly under a path
+   * @param registryOperations registry operations instance
+   * @param path path to list
+   * @return a mapping of the service records that were resolved, indexed
+   * by their full path
+   * @throws IOException on any read failure
+   */
+  public static Map<String, ServiceRecord> listServiceRecords(
+      RegistryOperations registryOperations,
+      String path) throws IOException {
+    Map<String, RegistryPathStatus> children =
+        statChildren(registryOperations, path);
+    return extractServiceRecords(registryOperations,
+        path,
+        children.values());
+  }
+
+  /**
+   * List children of a directory and retrieve their
+   * {@link RegistryPathStatus} values.
+   * <p>
+   * This is not an atomic operation; a child may be deleted
+   * during the iteration through the child entries. If this happens,
+   * the <code>PathNotFoundException</code> is caught and that child
+   * entry omitted.
+   *
+   * @param registryOperations registry operations instance
+   * @param path path
+   * @return a possibly empty map of child entries listed by
+   * their short name.
+   * @throws PathNotFoundException path is not in the registry.
+   * @throws InvalidPathnameException the path is invalid.
+   * @throws IOException Any other IO Exception
+   */
+  public static Map<String, RegistryPathStatus> statChildren(
+      RegistryOperations registryOperations,
+      String path)
+      throws PathNotFoundException,
+      InvalidPathnameException,
+      IOException {
+    List<String> childNames = registryOperations.list(path);
+    Map<String, RegistryPathStatus> results =
+        new HashMap<String, RegistryPathStatus>();
+    for (String childName : childNames) {
+      String child = join(path, childName);
+      try {
+        RegistryPathStatus stat = registryOperations.stat(child);
+        results.put(childName, stat);
+      } catch (PathNotFoundException pnfe) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("stat failed on {}: moved? {}", child, pnfe, pnfe);
+        }
+        // and continue
+      }
+    }
+    return results;
+  }
+
+  /**
+   * Get the home path of the current user.
+   * <p>
+   *  In an insecure cluster, the environment variable
+   *  <code>HADOOP_USER_NAME</code> is queried <i>first</i>.
+   * <p>
+   * This means that in a YARN container where the creator set this
+   * environment variable to propagate their identity, the defined
+   * user name is used in preference to the actual user.
+   * <p>
+   * In a secure cluster, the kerberos identity of the current user is used.
+   * @return a path for the current user's home dir.
+   * @throws RuntimeException if the current user identity cannot be determined
+   * from the OS/kerberos.
+   */
+  public static String homePathForCurrentUser() {
+    String shortUserName = currentUsernameUnencoded();
+    return homePathForUser(shortUserName);
+  }
+
+  /**
+   * Get the current username, before any encoding has been applied.
+   * @return the current user from the kerberos identity, falling back
+   * to the user and/or env variables.
+   */
+  private static String currentUsernameUnencoded() {
+    String env_hadoop_username = System.getenv(
+        RegistryInternalConstants.HADOOP_USER_NAME);
+    return getCurrentUsernameUnencoded(env_hadoop_username);
+  }
+
+  /**
+   * Get the current username, using the value of the parameter
+   * <code>env_hadoop_username</code> if it is set on an insecure cluster.
+   * This ensures that the username propagates correctly across processes
+   * started by YARN.
+   * <p>
+   * This method is primarily made visible for testing.
+   * @param env_hadoop_username the environment variable
+   * @return the selected username
+   * @throws RuntimeException if there is a problem getting the short user
+   * name of the current user.
+   */
+  @VisibleForTesting
+  public static String getCurrentUsernameUnencoded(String env_hadoop_username) {
+    String shortUserName = null;
+    if (!UserGroupInformation.isSecurityEnabled()) {
+      shortUserName = env_hadoop_username;
+    }
+    if (StringUtils.isEmpty(shortUserName)) {
+      try {
+        shortUserName = UserGroupInformation.getCurrentUser().getShortUserName();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    return shortUserName;
+  }
+
+  /**
+   * Get the current user path formatted for the registry
+   * <p>
+   *  In an insecure cluster, the environment variable
+   *  <code>HADOOP_USER_NAME</code> is queried <i>first</i>.
+   * <p>
+   * This means that in a YARN container where the creator set this
+   * environment variable to propagate their identity, the defined
+   * user name is used in preference to the actual user.
+   * <p>
+   * In a secure cluster, the kerberos identity of the current user is used.
+   * @return the encoded shortname of the current user
+   * @throws RuntimeException if the current user identity cannot be determined
+   * from the OS/kerberos.
+   *
+   */
+  public static String currentUser() {
+    String shortUserName = currentUsernameUnencoded();
+    return registryUser(shortUserName);
+  }
+
+  /**
+   * Convert the given user name to the form used in the registry.
+   *
+   * @param shortUserName short name of the user
+   * @return converted user name
+   */
+  public static String registryUser(String shortUserName) {
+    String encodedName =  encodeForRegistry(shortUserName);
+    // DNS name doesn't allow "_", replace it with "-"
+    encodedName = RegistryUtils.convertUsername(encodedName);
+    return encodedName.replace("_", "-");
+  }
+
+  /**
+   * Extract all service records under a list of stat operations; this
+   * skips entries that are too short or simply do not match.
+   * @param operations operation support for fetches
+   * @param parentpath path of the parent of all the entries
+   * @param stats Collection of stat results
+   * @return a possibly empty map of fullpath:record.
+   * @throws IOException for any IO Operation that wasn't ignored.
+   */
+  public static Map<String, ServiceRecord> extractServiceRecords(
+      RegistryOperations operations,
+      String parentpath,
+      Collection<RegistryPathStatus> stats) throws IOException {
+    Map<String, ServiceRecord> results = new HashMap<String, ServiceRecord>(stats.size());
+    for (RegistryPathStatus stat : stats) {
+      if (stat.size > ServiceRecord.RECORD_TYPE.length()) {
+        // maybe has data
+        String path = join(parentpath, stat.path);
+        try {
+          ServiceRecord serviceRecord = operations.resolve(path);
+          results.put(path, serviceRecord);
+        } catch (EOFException ignored) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("data too short for {}", path);
+          }
+        } catch (InvalidRecordException record) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Invalid record at {}", path);
+          }
+        } catch (NoRecordException record) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("No record at {}", path);
+          }
+        }
+      }
+    }
+    return results;
+  }
+
+  /**
+   * Extract all service records under a list of stat operations; this
+   * non-atomic action skips entries that are too short or simply do not match.
+   * <p>
+   * @param operations operation support for fetches
+   * @param parentpath path of the parent of all the entries
+   * @param stats map of short path to stat result
+   * @return a possibly empty map of fullpath:record.
+   * @throws IOException for any IO Operation that wasn't ignored.
+   */
+  public static Map<String, ServiceRecord> extractServiceRecords(
+      RegistryOperations operations,
+      String parentpath,
+      Map<String , RegistryPathStatus> stats) throws IOException {
+    return extractServiceRecords(operations, parentpath, stats.values());
+  }
+
+
+  /**
+   * Extract all service records under a parent path; this
+   * non-atomic action skips entries that are too short or simply do not match.
+   * <p>
+   * @param operations operation support for fetches
+   * @param parentpath path of the parent of all the entries
+   * @return a possibly empty map of fullpath:record.
+   * @throws IOException for any IO Operation that wasn't ignored.
+   */
+  public static Map<String, ServiceRecord> extractServiceRecords(
+      RegistryOperations operations,
+      String parentpath) throws IOException {
+    return extractServiceRecords(operations,
+        parentpath,
+        statChildren(operations, parentpath).values());
+  }
+
+
+
+  /**
+   * Static class for service record marshalling.
+   */
+  public static class ServiceRecordMarshal extends JsonSerDeser<ServiceRecord> {
+    public ServiceRecordMarshal() {
+      super(ServiceRecord.class);
+    }
+  }
+}
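
A hedged sketch of how the path builders, record listing and the marshaller fit together; registryOperations is assumed to be an already started RegistryOperations instance, the user/service class names are illustrative, and toJson() is assumed from the JsonSerDeser superclass:

import java.util.Map;

import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.types.ServiceRecord;

public class RegistryUtilsSketch {

  /** List every service record under one user's service class. */
  static void dumpRecords(RegistryOperations registryOperations) throws Exception {
    // -> "/users/alice/services/org-apache-example" (names are illustrative)
    String classPath = RegistryUtils.serviceclassPath("alice", "org-apache-example");

    Map<String, ServiceRecord> records =
        RegistryUtils.listServiceRecords(registryOperations, classPath);

    // ServiceRecordMarshal inherits JSON (de)serialization from JsonSerDeser
    RegistryUtils.ServiceRecordMarshal marshal = new RegistryUtils.ServiceRecordMarshal();
    for (Map.Entry<String, ServiceRecord> entry : records.entrySet()) {
      System.out.println(entry.getKey() + " => " + marshal.toJson(entry.getValue()));
    }
  }
}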

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/package-info.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/package-info.java
new file mode 100644
index 0000000..f99aa71
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Registry binding utility classes.
+ */
+package org.apache.hadoop.registry.client.binding;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/AuthenticationFailedException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/AuthenticationFailedException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/AuthenticationFailedException.java
new file mode 100644
index 0000000..aadb7fc
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/AuthenticationFailedException.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+/**
+ * Exception raised when client access wasn't authenticated.
+ * That is: the credentials provided were incomplete or invalid.
+ */
+public class AuthenticationFailedException extends RegistryIOException {
+  public AuthenticationFailedException(String path, Throwable cause) {
+    super(path, cause);
+  }
+
+  public AuthenticationFailedException(String path, String error) {
+    super(path, error);
+  }
+
+  public AuthenticationFailedException(String path,
+      String error,
+      Throwable cause) {
+    super(path, error, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidPathnameException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidPathnameException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidPathnameException.java
new file mode 100644
index 0000000..c984f41
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidPathnameException.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A path name was invalid. This is raised when a path string has
+ * characters in it that are not permitted.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InvalidPathnameException extends RegistryIOException {
+  public InvalidPathnameException(String path, String message) {
+    super(path, message);
+  }
+
+  public InvalidPathnameException(String path,
+      String message,
+      Throwable cause) {
+    super(path, message, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidRecordException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidRecordException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidRecordException.java
new file mode 100644
index 0000000..e4f545e
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/InvalidRecordException.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Raised if an attempt to parse a record failed.
+ *
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InvalidRecordException extends RegistryIOException {
+
+  public InvalidRecordException(String path, String error) {
+    super(path, error);
+  }
+
+  public InvalidRecordException(String path,
+      String error,
+      Throwable cause) {
+    super(path, error, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoChildrenForEphemeralsException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoChildrenForEphemeralsException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoChildrenForEphemeralsException.java
new file mode 100644
index 0000000..24070a5
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoChildrenForEphemeralsException.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * This is a manifestation of the Zookeeper restrictions about
+ * what nodes may act as parents.
+ *
+ * Children are not allowed under ephemeral nodes. This is an aspect
+ * of ZK which isn't directly exposed to the registry API. It may
+ * surface if the registry is manipulated outside of the registry API.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class NoChildrenForEphemeralsException extends RegistryIOException {
+  public NoChildrenForEphemeralsException(String path, Throwable cause) {
+    super(path, cause);
+  }
+
+  public NoChildrenForEphemeralsException(String path, String error) {
+    super(path, error);
+  }
+
+  public NoChildrenForEphemeralsException(String path,
+      String error,
+      Throwable cause) {
+    super(path, error, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoPathPermissionsException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoPathPermissionsException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoPathPermissionsException.java
new file mode 100644
index 0000000..ce84f5b
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoPathPermissionsException.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.fs.PathIOException;
+
+/**
+ * Raised on path permission exceptions.
+ * <p>
+ * This is similar to {@link PathIOException}, but marks the failure as a
+ * registry permissions problem.
+ */
+public class NoPathPermissionsException extends RegistryIOException {
+  public NoPathPermissionsException(String path, Throwable cause) {
+    super(path, cause);
+  }
+
+  public NoPathPermissionsException(String path, String error) {
+    super(path, error);
+  }
+
+  public NoPathPermissionsException(String path, String error, Throwable cause) {
+    super(path, error, cause);
+  }
+
+  public NoPathPermissionsException(String message,
+      PathIOException cause) {
+    super(message, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
new file mode 100644
index 0000000..b81b9d4
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+
+/**
+ * Raised if there is no {@link ServiceRecord} resolved at the end
+ * of the specified path.
+ * <p>
+ * There may be valid data of some form at the end of the path, but it does
+ * not appear to be a Service Record.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class NoRecordException extends RegistryIOException {
+
+  public NoRecordException(String path, String error) {
+    super(path, error);
+  }
+
+  public NoRecordException(String path,
+      String error,
+      Throwable cause) {
+    super(path, error, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/RegistryIOException.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/RegistryIOException.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/RegistryIOException.java
new file mode 100644
index 0000000..ca966db
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/RegistryIOException.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.PathIOException;
+
+/**
+ * Base exception for registry operations.
+ * <p>
+ * These exceptions include the path of the failing operation wherever possible;
+ * this can be retrieved via {@link PathIOException#getPath()}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RegistryIOException extends PathIOException {
+
+  /**
+   * Build an exception from any other Path IO Exception.
+   * This propagates the path of the original exception
+   * @param message more specific text
+   * @param cause cause
+   */
+  public RegistryIOException(String message, PathIOException cause) {
+    super(cause.getPath() != null ? cause.getPath().toString() : "",
+        message,
+        cause);
+  }
+
+  public RegistryIOException(String path, Throwable cause) {
+    super(path, cause);
+  }
+
+  public RegistryIOException(String path, String error) {
+    super(path, error);
+  }
+
+  public RegistryIOException(String path, String error, Throwable cause) {
+    super(path, error, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/package-info.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/package-info.java
new file mode 100644
index 0000000..7d9c8ad
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/package-info.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Registry Service Exceptions
+ * <p>
+ * These are the Registry-specific exceptions that may be raised during
+ * Registry operations.
+ * <p>
+ * Other exceptions may be raised, especially <code>IOExceptions</code>
+ * triggered by network problems, and <code>IllegalArgumentException</code>
+ * exceptions that may be raised if invalid (often null) arguments are passed
+ * to a method call.
+ * <p>
+ *   All exceptions in this package are derived from
+ *   {@link org.apache.hadoop.registry.client.exceptions.RegistryIOException}
+ */
+package org.apache.hadoop.registry.client.exceptions;
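
Since every exception in this package extends RegistryIOException (itself a PathIOException), callers can handle the whole family in one place. A small illustrative sketch; the helper name is chosen here and not part of the patch:

import java.io.IOException;

import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.registry.client.exceptions.NoRecordException;
import org.apache.hadoop.registry.client.exceptions.RegistryIOException;
import org.apache.hadoop.registry.client.types.ServiceRecord;

public class RegistryExceptionSketch {

  /** Resolve a record, distinguishing "no record here" from other registry failures. */
  static ServiceRecord resolveOrNull(RegistryOperations operations, String path)
      throws IOException {
    try {
      return operations.resolve(path);
    } catch (NoRecordException e) {
      // a valid path, but whatever is stored there does not parse as a ServiceRecord
      return null;
    } catch (RegistryIOException e) {
      // every registry exception carries the failing path (PathIOException.getPath())
      System.err.println("Registry failure at " + e.getPath() + ": " + e);
      throw e;
    }
  }
}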

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/FSRegistryOperationsService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/FSRegistryOperationsService.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/FSRegistryOperationsService.java
new file mode 100644
index 0000000..41884a9
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/FSRegistryOperationsService.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.impl;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.NotImplementedException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.registry.client.api.BindFlags;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
+import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
+import org.apache.hadoop.registry.client.exceptions.NoRecordException;
+import org.apache.hadoop.registry.client.types.RegistryPathStatus;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.apache.hadoop.service.CompositeService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+
+/**
+ * Filesystem-based implementation of RegistryOperations. This class relies
+ * entirely on the configured FS for security and does no extra checks.
+ */
+public class FSRegistryOperationsService extends CompositeService
+    implements RegistryOperations {
+
+  private FileSystem fs;
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FSRegistryOperationsService.class);
+  private final RegistryUtils.ServiceRecordMarshal serviceRecordMarshal =
+      new RegistryUtils.ServiceRecordMarshal();
+
+  public FSRegistryOperationsService() {
+    super(FSRegistryOperationsService.class.getName());
+  }
+
+  @VisibleForTesting
+  public FileSystem getFs() {
+    return this.fs;
+  }
+
+  @Override
+  protected void serviceInit(Configuration conf) {
+    try {
+      this.fs = FileSystem.get(conf);
+      LOG.info("Initialized Yarn-registry with Filesystem "
+          + fs.getClass().getCanonicalName());
+    } catch (IOException e) {
+      LOG.error("Failed to get FileSystem for registry", e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  private Path makePath(String path) {
+    return new Path(path);
+  }
+
+  private Path formatDataPath(String basePath) {
+    return Path.mergePaths(new Path(basePath), new Path("/_record"));
+  }
+
+  private String relativize(String basePath, String childPath) {
+    String relative = new File(basePath).toURI()
+        .relativize(new File(childPath).toURI()).getPath();
+    return relative;
+  }
+
+  @Override
+  public boolean mknode(String path, boolean createParents)
+      throws PathNotFoundException, InvalidPathnameException, IOException {
+    Path registryPath = makePath(path);
+
+    // getFileStatus throws FileNotFound if the path doesn't exist. If the
+    // file already exists, return.
+    try {
+      fs.getFileStatus(registryPath);
+      return false;
+    } catch (FileNotFoundException e) {
+    }
+
+    if (createParents) {
+      // By default, mkdirs creates any parent dirs it needs
+      fs.mkdirs(registryPath);
+    } else {
+      FileStatus parentStatus = null;
+
+      if (registryPath.getParent() != null) {
+        parentStatus = fs.getFileStatus(registryPath.getParent());
+      }
+
+      if (registryPath.getParent() == null || parentStatus.isDirectory()) {
+        fs.mkdirs(registryPath);
+      } else {
+        throw new PathNotFoundException("no parent for " + path);
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public void bind(String path, ServiceRecord record, int flags)
+      throws PathNotFoundException, FileAlreadyExistsException,
+      InvalidPathnameException, IOException {
+
+    // Preserve same overwrite semantics as ZK implementation
+    Preconditions.checkArgument(record != null, "null record");
+    RegistryTypeUtils.validateServiceRecord(path, record);
+
+    Path dataPath = formatDataPath(path);
+    boolean overwrite = ((flags & BindFlags.OVERWRITE) != 0);
+    if (fs.exists(dataPath) && !overwrite) {
+      throw new FileAlreadyExistsException(path);
+    } else {
+      // Either the file doesn't exist, or it exists and we're
+      // overwriting. Create overwrites by default and creates parent dirs if
+      // needed.
+      try (FSDataOutputStream stream = fs.create(dataPath)) {
+        byte[] bytes = serviceRecordMarshal.toBytes(record);
+        stream.write(bytes);
+      }
+      LOG.info("Bound record to path {}", dataPath);
+    }
+  }
+
+  @Override
+  public ServiceRecord resolve(String path) throws PathNotFoundException,
+      NoRecordException, InvalidRecordException, IOException {
+    // Read the entire file into a byte array; service records are small.
+
+    long size = fs.getFileStatus(formatDataPath(path)).getLen();
+    byte[] bytes = new byte[(int) size];
+
+    int bytesRead;
+    try (FSDataInputStream instream = fs.open(formatDataPath(path))) {
+      bytesRead = instream.read(bytes);
+    }
+
+    if (bytesRead < size) {
+      throw new InvalidRecordException(path,
+          "Expected " + size + " bytes, but read " + bytesRead);
+    }
+
+    // Unmarshal, check, and return
+    ServiceRecord record = serviceRecordMarshal.fromBytes(path, bytes);
+    RegistryTypeUtils.validateServiceRecord(path, record);
+    return record;
+  }
+
+  @Override
+  public RegistryPathStatus stat(String path)
+      throws PathNotFoundException, InvalidPathnameException, IOException {
+    FileStatus fstat = fs.getFileStatus(formatDataPath(path));
+    int numChildren = fs.listStatus(makePath(path)).length;
+
+    RegistryPathStatus regstat =
+        new RegistryPathStatus(fstat.getPath().toString(),
+            fstat.getModificationTime(), fstat.getLen(), numChildren);
+
+    return regstat;
+  }
+
+  @Override
+  public boolean exists(String path) throws IOException {
+    return fs.exists(makePath(path));
+  }
+
+  @Override
+  public List<String> list(String path)
+      throws PathNotFoundException, InvalidPathnameException, IOException {
+    FileStatus[] statArray = fs.listStatus(makePath(path));
+    String basePath = fs.getFileStatus(makePath(path)).getPath().toString();
+
+    List<String> paths = new ArrayList<String>();
+
+    FileStatus stat;
+    // Only count dirs; the _record files are hidden.
+    for (int i = 0; i < statArray.length; i++) {
+      stat = statArray[i];
+      if (stat.isDirectory()) {
+        String relativePath = relativize(basePath, stat.getPath().toString());
+        paths.add(relativePath);
+      }
+    }
+
+    return paths;
+  }
+
+  @Override
+  public void delete(String path, boolean recursive)
+      throws PathNotFoundException, PathIsNotEmptyDirectoryException,
+      InvalidPathnameException, IOException {
+    Path dirPath = makePath(path);
+    if (!fs.exists(dirPath)) {
+      throw new PathNotFoundException(path);
+    }
+
+    // If recursive == true, or dir is empty, delete.
+    if (recursive || list(path).isEmpty()) {
+      fs.delete(makePath(path), true);
+      return;
+    }
+
+    throw new PathIsNotEmptyDirectoryException(path);
+  }
+
+  @Override
+  public boolean addWriteAccessor(String id, String pass) throws IOException {
+    throw new NotImplementedException("Code is not implemented");
+  }
+
+  @Override
+  public void clearWriteAccessors() {
+    throw new NotImplementedException("Code is not implemented");
+  }
+
+}
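
A usage sketch only, none of it part of the patch: the class follows the standard Hadoop service lifecycle, so a caller inits and starts it before binding and resolving records. The registry path and record contents below are placeholder assumptions, and it presumes the default filesystem in the Configuration is writable by the caller.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.registry.client.api.BindFlags;
    import org.apache.hadoop.registry.client.impl.FSRegistryOperationsService;
    import org.apache.hadoop.registry.client.types.ServiceRecord;

    public class FSRegistryExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Standard Hadoop service lifecycle: init, start, use, stop.
        FSRegistryOperationsService registry = new FSRegistryOperationsService();
        registry.init(conf);
        registry.start();
        try {
          // Placeholder path; parents are created because createParents=true.
          String path = "/users/alice/services/demo";
          registry.mknode(path, true);

          // Assumption: an otherwise-empty record passes validation.
          ServiceRecord record = new ServiceRecord();
          record.description = "demo service";
          registry.bind(path, record, BindFlags.OVERWRITE);

          ServiceRecord resolved = registry.resolve(path);
          System.out.println("Resolved: " + resolved.description);
        } finally {
          registry.stop();
        }
      }
    }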

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/RegistryOperationsClient.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/RegistryOperationsClient.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/RegistryOperationsClient.java
new file mode 100644
index 0000000..44cefed
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/RegistryOperationsClient.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.impl;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.registry.client.impl.zk.RegistryBindingSource;
+import org.apache.hadoop.registry.client.impl.zk.RegistryOperationsService;
+
+
+/**
+ * This is the client service for applications to work with the registry.
+ *
+ * It does not set up the root paths of the registry; it is bound
+ * to a user and can be configured to use SASL, anonymous, or id:pass auth.
+ *
+ * For SASL, the client must be operating in the context of an
+ * authenticated user.
+ *
+ * For id:pass, the client must have the relevant id and password; SASL is
+ * not used even if the client has credentials.
+ *
+ * For anonymous access, no credentials are used.
+ *
+ * Any SASL-authenticated client can also add one or more id:pass
+ * authentication pairs to all future writes, and reset them later.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RegistryOperationsClient extends RegistryOperationsService {
+
+  public RegistryOperationsClient(String name) {
+    super(name);
+  }
+
+  public RegistryOperationsClient(String name,
+      RegistryBindingSource bindingSource) {
+    super(name, bindingSource);
+  }
+}
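
A minimal client sketch, not part of the patch: it assumes the configuration carries a reachable ZooKeeper quorum under hadoop.registry.zk.quorum (the hostname below is a placeholder) and simply resolves a record through the RegistryOperations API.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.registry.client.impl.RegistryOperationsClient;
    import org.apache.hadoop.registry.client.types.ServiceRecord;

    public class RegistryClientExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder ensemble; point this at a real quorum.
        conf.set("hadoop.registry.zk.quorum", "zk1.example.com:2181");

        RegistryOperationsClient client =
            new RegistryOperationsClient("example-registry-client");
        client.init(conf);
        client.start();
        try {
          // Anonymous or SASL-authenticated, depending on the security
          // settings in the configuration.
          ServiceRecord record = client.resolve("/users/alice/services/demo");
          System.out.println(record);
        } finally {
          client.stop();
        }
      }
    }

In practice a client is more typically obtained through the RegistryOperationsFactory in the client.api package rather than constructed directly.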

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/package-info.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/package-info.java
new file mode 100644
index 0000000..d85b6a7
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/package-info.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Registry client services
+ * <p>
+ * These are classes that follow the YARN service lifecycle and implement
+ * the {@link org.apache.hadoop.registry.client.api.RegistryOperations}
+ * API.
+ */
+package org.apache.hadoop.registry.client.impl;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/BindingInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/BindingInformation.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/BindingInformation.java
new file mode 100644
index 0000000..8ae003d
--- /dev/null
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/BindingInformation.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.registry.client.impl.zk;
+
+import org.apache.curator.ensemble.EnsembleProvider;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Binding information provided by a {@link RegistryBindingSource}
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class BindingInformation {
+
+  /**
+   * The Curator Ensemble Provider
+   */
+  public EnsembleProvider ensembleProvider;
+
+  /**
+   * Any information that may be useful for diagnostics
+   */
+  public String description;
+}
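
As a sketch of how this struct is typically filled in (assuming RegistryBindingSource exposes a single supplyBindingInformation() method and that Curator's FixedEnsembleProvider is on the classpath; the quorum string is a placeholder):

    import org.apache.curator.ensemble.fixed.FixedEnsembleProvider;
    import org.apache.hadoop.registry.client.impl.zk.BindingInformation;
    import org.apache.hadoop.registry.client.impl.zk.RegistryBindingSource;

    /** Binding source that always points at one fixed ZooKeeper ensemble. */
    public class FixedQuorumBindingSource implements RegistryBindingSource {

      private final String quorum;

      public FixedQuorumBindingSource(String quorum) {
        this.quorum = quorum;
      }

      @Override
      public BindingInformation supplyBindingInformation() {
        BindingInformation binding = new BindingInformation();
        binding.ensembleProvider = new FixedEnsembleProvider(quorum);
        binding.description = "fixed quorum: " + quorum;
        return binding;
      }
    }

Such a source can then be passed to the two-argument RegistryOperationsClient constructor shown earlier in this patch.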


