From: jianhe@apache.org
To: common-commits@hadoop.apache.org
Date: Mon, 06 Nov 2017 22:01:38 -0000
Message-Id: <70d5d0b5a7174a49a5676af411df89c9@git.apache.org>
In-Reply-To: <6d4786bbf9a2474696ffe184527caab8@git.apache.org>
References: <6d4786bbf9a2474696ffe184527caab8@git.apache.org>
Subject: [04/58] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigHelper.java
deleted file mode 100644
index fe8cce8..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigHelper.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.StringWriter;
-import java.net.URL;
-import java.util.Map;
-
-/**
- * Methods to aid in config, both in the Configuration class and
- * with other parts of setting up Slider-initiated processes.
- *
- * Some of the methods take an argument of a map iterable for their sources;
- * this allows the same method to be used with both plain maps and
- * Configuration instances (which are iterables of entries).
- */
-public class ConfigHelper {
-  private static final Logger log = LoggerFactory.getLogger(ConfigHelper.class);
-
-  /**
-   * Set an entire map full of values
-   *
-   * @param config config to patch
-   * @param map map of data
-   * @param origin origin data
-   */
-  public static void addConfigMap(Configuration config,
-      Map<String, String> map,
-      String origin) throws BadConfigException {
-    addConfigMap(config, map.entrySet(), origin);
-  }
-
-  /**
-   * Set an entire map full of values
-   *
-   * @param config config to patch
-   * @param map map of data
-   * @param origin origin data
-   */
-  public static void addConfigMap(Configuration config,
-      Iterable<Map.Entry<String, String>> map,
-      String origin) throws BadConfigException {
-    for (Map.Entry<String, String> mapEntry : map) {
-      String key = mapEntry.getKey();
-      String value = mapEntry.getValue();
-      if (value == null) {
-        throw new BadConfigException("Null value for property " + key);
-      }
-      config.set(key, value, origin);
-    }
-  }
-
-  /**
-   * Convert to an XML string
-   * @param conf configuration
-   * @return the configuration serialized as an XML string
-   * @throws IOException
-   */
-  public static String toXml(Configuration conf) throws IOException {
-    StringWriter writer = new StringWriter();
-    conf.writeXml(writer);
-    return writer.toString();
-  }
-
-  /**
-   * Register a resource as a default resource.
-   * Do not attempt to use this unless you understand that the
-   * order in which default resources are loaded affects the outcome,
-   * and that subclasses of Configuration often register new default
-   * resources
-   * @param resource the resource name
-   * @return the URL or null
-   */
-  public static URL registerDefaultResource(String resource) {
-    URL resURL = getResourceUrl(resource);
-    if (resURL != null) {
-      Configuration.addDefaultResource(resource);
-    }
-    return resURL;
-  }
-
-  /**
-   * Load a configuration from a resource on this classpath.
-   * If the resource is not found, an empty configuration is returned
-   * @param resource the resource name
-   * @return the loaded configuration.
-   */
-  public static Configuration loadFromResource(String resource) {
-    Configuration conf = new Configuration(false);
-    URL resURL = getResourceUrl(resource);
-    if (resURL != null) {
-      log.debug("loaded resources from {}", resURL);
-      conf.addResource(resource);
-    } else {
-      log.debug("failed to find {} on the classpath", resource);
-    }
-    return conf;
-  }
-
-  /**
-   * Get the URL to a resource, null if not on the CP
-   * @param resource resource to look for
-   * @return the URL or null
-   */
-  public static URL getResourceUrl(String resource) {
-    return ConfigHelper.class.getClassLoader()
-        .getResource(resource);
-  }
-
-  /**
-   * This goes through the keyset of one configuration and retrieves each value
-   * from a value source -a different or the same configuration.
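
For illustration, the helpers above might be combined as follows; this sketch is not part of the deleted file, and the resource name and property values are hypothetical:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.service.utils.ConfigHelper;

    public class ConfigHelperDemo {
      public static void main(String[] args) throws Exception {
        // Empty Configuration if demo-site.xml is not on the classpath.
        Configuration conf = ConfigHelper.loadFromResource("demo-site.xml");
        Map<String, String> overrides = new HashMap<>();
        overrides.put("demo.property", "value");
        // Patch every entry in, recording "demo overrides" as the origin;
        // a null value fails with BadConfigException.
        ConfigHelper.addConfigMap(conf, overrides, "demo overrides");
        // Serialize the merged configuration for inspection.
        System.out.println(ConfigHelper.toXml(conf));
      }
    }
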
This triggers - * the property resolution process of the value, resolving any variables against - * in-config or inherited configurations - * @param keysource source of keys - * @param valuesource the source of values - * @return a new configuration where foreach key in keysource, get(key)==valuesource.get(key) - */ - public static Configuration resolveConfiguration( - Iterable> keysource, - Configuration valuesource) { - Configuration result = new Configuration(false); - for (Map.Entry entry : keysource) { - String key = entry.getKey(); - String value = valuesource.get(key); - Preconditions.checkState(value != null, - "no reference for \"%s\" in values", key); - result.set(key, value); - } - return result; - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigUtils.java deleted file mode 100644 index a969be9..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ConfigUtils.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-package org.apache.hadoop.yarn.service.utils;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
-import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class ConfigUtils {
-  public static final String TEMPLATE_FILE = "template.file";
-
-  public static String replaceProps(Map<String, String> config,
-      String content) {
-    Map<String, String> tokens = new HashMap<>();
-    for (Entry<String, String> entry : config.entrySet()) {
-      tokens.put("${" + entry.getKey() + "}", entry.getValue());
-      tokens.put("{{" + entry.getKey() + "}}", entry.getValue());
-    }
-    String value = content;
-    for (Map.Entry<String, String> token : tokens.entrySet()) {
-      value = value.replaceAll(Pattern.quote(token.getKey()),
-          Matcher.quoteReplacement(token.getValue()));
-    }
-    return value;
-  }
-
-  public static Map<String, String> replacePropsInConfig(
-      Map<String, String> config, Map<String, String> env) {
-    Map<String, String> tokens = new HashMap<>();
-    for (Entry<String, String> entry : env.entrySet()) {
-      tokens.put("${" + entry.getKey() + "}", entry.getValue());
-    }
-    Map<String, String> newConfig = new HashMap<>();
-    for (Entry<String, String> entry : config.entrySet()) {
-      String value = entry.getValue();
-      for (Map.Entry<String, String> token : tokens.entrySet()) {
-        value = value.replaceAll(Pattern.quote(token.getKey()),
-            Matcher.quoteReplacement(token.getValue()));
-      }
-      // store the substituted value, not the raw input
-      newConfig.put(entry.getKey(), value);
-    }
-    return newConfig;
-  }
-
-  public static void prepConfigForTemplateOutputter(ConfigFormat configFormat,
-      Map<String, String> config, SliderFileSystem fileSystem,
-      String clusterName, String fileName) throws IOException {
-    if (!configFormat.equals(ConfigFormat.TEMPLATE)) {
-      return;
-    }
-    Path templateFile = null;
-    if (config.containsKey(TEMPLATE_FILE)) {
-      templateFile = fileSystem.buildResourcePath(config.get(TEMPLATE_FILE));
-      if (!fileSystem.isFile(templateFile)) {
-        templateFile = fileSystem.buildResourcePath(clusterName,
-            config.get(TEMPLATE_FILE));
-      }
-      if (!fileSystem.isFile(templateFile)) {
-        throw new IOException("config specified template file " + config
            .get(TEMPLATE_FILE) + " but " + templateFile + " doesn't exist");
-      }
-    }
-    if (templateFile == null && fileName != null) {
-      templateFile = fileSystem.buildResourcePath(fileName);
-      if (!fileSystem.isFile(templateFile)) {
-        templateFile = fileSystem.buildResourcePath(clusterName,
-            fileName);
-      }
-    }
-    if (fileSystem.isFile(templateFile)) {
-      config.put("content", fileSystem.cat(templateFile));
-    } else {
-      config.put("content", "");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java
deleted file mode 100644
index fa3b402..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java
+++ /dev/null
@@ -1,521 +0,0 @@
-/*
- * Licensed to the Apache Software
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.utils; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.util.VersionInfo; -import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.api.records.LocalResourceType; -import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; -import org.apache.hadoop.yarn.api.records.URL; -import org.apache.hadoop.yarn.service.conf.SliderExitCodes; -import org.apache.hadoop.yarn.service.conf.YarnServiceConstants; -import org.apache.hadoop.yarn.service.conf.YarnServiceConf; -import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException; -import org.apache.hadoop.yarn.service.exceptions.ErrorStrings; -import org.apache.hadoop.yarn.service.exceptions.SliderException; -import org.apache.hadoop.yarn.util.Records; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -public class CoreFileSystem { - private static final Logger - log = LoggerFactory.getLogger(CoreFileSystem.class); - - private static final String UTF_8 = "UTF-8"; - - protected final FileSystem fileSystem; - protected final Configuration configuration; - - public CoreFileSystem(FileSystem fileSystem, Configuration configuration) { - Preconditions.checkNotNull(fileSystem, - "Cannot create a CoreFileSystem with a null FileSystem"); - Preconditions.checkNotNull(configuration, - "Cannot create a CoreFileSystem with a null Configuration"); - this.fileSystem = fileSystem; - this.configuration = configuration; - } - - public CoreFileSystem(Configuration configuration) throws IOException { - Preconditions.checkNotNull(configuration, - "Cannot create a CoreFileSystem with a null Configuration"); - this.fileSystem = FileSystem.get(configuration); - this.configuration = configuration; - } - - /** - * Get the temp path for this cluster - * @param clustername name of the cluster - * @return path for temp files (is not purged) - */ - public Path getTempPathForCluster(String clustername) { - Path clusterDir = buildClusterDirPath(clustername); - return new Path(clusterDir, YarnServiceConstants.TMP_DIR_PREFIX); - } - - /** - * Returns the underlying FileSystem for this object. 
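
As a usage sketch (the service name is hypothetical, and this is not part of the deleted file): bind CoreFileSystem to the default filesystem and resolve the per-service temp directory.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.service.utils.CoreFileSystem;

    public class CoreFsDemo {
      public static void main(String[] args) throws Exception {
        // Binds to whatever fs.defaultFS resolves to in this Configuration.
        CoreFileSystem coreFs = new CoreFileSystem(new Configuration());
        // Cluster dir plus YarnServiceConstants.TMP_DIR_PREFIX; not purged.
        Path tmp = coreFs.getTempPathForCluster("my-service");
        System.out.println(tmp);
      }
    }
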
- * - * @return filesystem - */ - public FileSystem getFileSystem() { - return fileSystem; - } - - @Override - public String toString() { - final StringBuilder sb = - new StringBuilder("CoreFileSystem{"); - sb.append("fileSystem=").append(fileSystem.getUri()); - sb.append('}'); - return sb.toString(); - } - - /** - * Build up the path string for a cluster instance -no attempt to - * create the directory is made - * - * @param clustername name of the cluster - * @return the path for persistent data - */ - public Path buildClusterDirPath(String clustername) { - Preconditions.checkNotNull(clustername); - Path path = getBaseApplicationPath(); - return new Path(path, YarnServiceConstants.SERVICES_DIRECTORY + "/" + clustername); - } - - - /** - * Build up the path string for keytab install location -no attempt to - * create the directory is made - * - * @return the path for keytab - */ - public Path buildKeytabInstallationDirPath(String keytabFolder) { - Preconditions.checkNotNull(keytabFolder); - Path path = getBaseApplicationPath(); - return new Path(path, YarnServiceConstants.KEYTAB_DIR + "/" + keytabFolder); - } - - /** - * Build up the path string for keytab install location -no attempt to - * create the directory is made - * - * @return the path for keytab installation location - */ - public Path buildKeytabPath(String keytabDir, String keytabName, String clusterName) { - Path homePath = getHomeDirectory(); - Path baseKeytabDir; - if (keytabDir != null) { - baseKeytabDir = new Path(homePath, keytabDir); - } else { - baseKeytabDir = new Path(buildClusterDirPath(clusterName), - YarnServiceConstants.KEYTAB_DIR); - } - return keytabName == null ? baseKeytabDir : - new Path(baseKeytabDir, keytabName); - } - - /** - * Build up the path string for resource install location -no attempt to - * create the directory is made - * - * @return the path for resource - */ - public Path buildResourcePath(String resourceFolder) { - Preconditions.checkNotNull(resourceFolder); - Path path = getBaseApplicationPath(); - return new Path(path, YarnServiceConstants.RESOURCE_DIR + "/" + resourceFolder); - } - - /** - * Build up the path string for resource install location -no attempt to - * create the directory is made - * - * @return the path for resource - */ - public Path buildResourcePath(String dirName, String fileName) { - Preconditions.checkNotNull(dirName); - Preconditions.checkNotNull(fileName); - Path path = getBaseApplicationPath(); - return new Path(path, YarnServiceConstants.RESOURCE_DIR + "/" + dirName + "/" + fileName); - } - - /** - * Create a directory with the given permissions. 
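
Continuing the CoreFsDemo sketch above (all names hypothetical), the builders compute, but never create, locations under the base application path:

    Path clusterDir = coreFs.buildClusterDirPath("my-service");
    Path keytabDir = coreFs.buildKeytabInstallationDirPath("my-keytabs");
    // A null keytabDir argument resolves under the cluster directory.
    Path keytab = coreFs.buildKeytabPath(null, "svc.keytab", "my-service");
    Path resource = coreFs.buildResourcePath("conf", "app.properties");
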
- * - * @param dir directory - * @param clusterPerms cluster permissions - * @throws IOException IO problem - * @throws BadClusterStateException any cluster state problem - */ - @SuppressWarnings("deprecation") - public void createWithPermissions(Path dir, FsPermission clusterPerms) throws - IOException, - BadClusterStateException { - if (fileSystem.isFile(dir)) { - // HADOOP-9361 shows some filesystems don't correctly fail here - throw new BadClusterStateException( - "Cannot create a directory over a file %s", dir); - } - log.debug("mkdir {} with perms {}", dir, clusterPerms); - //no mask whatoever - fileSystem.getConf().set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "000"); - fileSystem.mkdirs(dir, clusterPerms); - //and force set it anyway just to make sure - fileSystem.setPermission(dir, clusterPerms); - } - - /** - * Verify that the cluster directory is not present - * - * @param clustername name of the cluster - * @param clusterDirectory actual directory to look for - * @throws IOException trouble with FS - * @throws SliderException If the directory exists - */ - public void verifyClusterDirectoryNonexistent(String clustername, - Path clusterDirectory) - throws IOException, SliderException { - if (fileSystem.exists(clusterDirectory)) { - throw new SliderException(SliderExitCodes.EXIT_INSTANCE_EXISTS, - ErrorStrings.PRINTF_E_INSTANCE_ALREADY_EXISTS, clustername, - clusterDirectory); - } - } - /** - * Verify that the given directory is not present - * - * @param clusterDirectory actual directory to look for - * @throws IOException trouble with FS - * @throws SliderException If the directory exists - */ - public void verifyDirectoryNonexistent(Path clusterDirectory) throws - IOException, - SliderException { - if (fileSystem.exists(clusterDirectory)) { - - log.error("Dir {} exists: {}", - clusterDirectory, - listFSDir(clusterDirectory)); - throw new SliderException(SliderExitCodes.EXIT_INSTANCE_EXISTS, - ErrorStrings.PRINTF_E_INSTANCE_DIR_ALREADY_EXISTS, - clusterDirectory); - } - } - - /** - * Verify that a user has write access to a directory. 
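
Continuing the CoreFsDemo sketch, a typical create-if-absent sequence with the two helpers above (the permission mask is an assumption):

    import org.apache.hadoop.fs.permission.FsPermission;

    Path dir = coreFs.buildClusterDirPath("my-service");
    // Throws SliderException(EXIT_INSTANCE_EXISTS) if the path is taken.
    coreFs.verifyDirectoryNonexistent(dir);
    // mkdirs under a 000 umask, then setPermission to force the mode.
    coreFs.createWithPermissions(dir, new FsPermission("750"));
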
- * It does this by creating then deleting a temp file - * - * @param dirPath actual directory to look for - * @throws FileNotFoundException file not found - * @throws IOException trouble with FS - * @throws BadClusterStateException if the directory is not writeable - */ - public void verifyDirectoryWriteAccess(Path dirPath) throws IOException, - SliderException { - verifyPathExists(dirPath); - Path tempFile = new Path(dirPath, "tmp-file-for-checks"); - try { - FSDataOutputStream out ; - out = fileSystem.create(tempFile, true); - IOUtils.closeStream(out); - fileSystem.delete(tempFile, false); - } catch (IOException e) { - log.warn("Failed to create file {}: {}", tempFile, e); - throw new BadClusterStateException(e, - "Unable to write to directory %s : %s", dirPath, e.toString()); - } - } - - /** - * Verify that a path exists - * @param path path to check - * @throws FileNotFoundException file not found - * @throws IOException trouble with FS - */ - public void verifyPathExists(Path path) throws IOException { - if (!fileSystem.exists(path)) { - throw new FileNotFoundException(path.toString()); - } - } - - /** - * Verify that a path exists - * @param path path to check - * @throws FileNotFoundException file not found or is not a file - * @throws IOException trouble with FS - */ - public void verifyFileExists(Path path) throws IOException { - FileStatus status = fileSystem.getFileStatus(path); - - if (!status.isFile()) { - throw new FileNotFoundException("Not a file: " + path.toString()); - } - } - - /** - * Given a path, check if it exists and is a file - * - * @param path - * absolute path to the file to check - * @return true if and only if path exists and is a file, false for all other - * reasons including if file check throws IOException - */ - public boolean isFile(Path path) { - boolean isFile = false; - try { - FileStatus status = fileSystem.getFileStatus(path); - if (status.isFile()) { - isFile = true; - } - } catch (IOException e) { - // ignore, isFile is already set to false - } - return isFile; - } - - /** - * Get the base path - * - * @return the base path optionally configured by - * {@link YarnServiceConf#YARN_SERVICE_BASE_PATH} - */ - public Path getBaseApplicationPath() { - String configuredBasePath = configuration - .get(YarnServiceConf.YARN_SERVICE_BASE_PATH, - getHomeDirectory() + "/" + YarnServiceConstants.SERVICE_BASE_DIRECTORY); - return new Path(configuredBasePath); - } - - /** - * Get slider dependency parent dir in HDFS - * - * @return the parent dir path of slider.tar.gz in HDFS - */ - public Path getDependencyPath() { - String parentDir = YarnServiceConstants.DEPENDENCY_DIR; - return new Path(String.format(parentDir, VersionInfo.getVersion())); - } - - /** - * Get slider.tar.gz absolute filepath in HDFS - * - * @return the absolute path to slider.tar.gz in HDFS - */ - public Path getDependencyTarGzip() { - Path dependencyLibAmPath = getDependencyPath(); - Path dependencyLibTarGzip = new Path( - dependencyLibAmPath.toUri().toString(), - YarnServiceConstants.DEPENDENCY_TAR_GZ_FILE_NAME - + YarnServiceConstants.DEPENDENCY_TAR_GZ_FILE_EXT); - return dependencyLibTarGzip; - } - - public Path getHomeDirectory() { - return fileSystem.getHomeDirectory(); - } - - /** - * Create an AM resource from the - * - * @param destPath dest path in filesystem - * @param resourceType resource type - * @return the local resource for AM - */ - public LocalResource createAmResource(Path destPath, LocalResourceType resourceType) throws IOException { - FileStatus destStatus = 
fileSystem.getFileStatus(destPath); - LocalResource amResource = Records.newRecord(LocalResource.class); - amResource.setType(resourceType); - // Set visibility of the resource - // Setting to most private option - amResource.setVisibility(LocalResourceVisibility.APPLICATION); - // Set the resource to be copied over - amResource.setResource( - URL.fromPath(fileSystem.resolvePath(destStatus.getPath()))); - // Set timestamp and length of file so that the framework - // can do basic sanity checks for the local resource - // after it has been copied over to ensure it is the same - // resource the client intended to use with the application - amResource.setTimestamp(destStatus.getModificationTime()); - amResource.setSize(destStatus.getLen()); - return amResource; - } - - /** - * Register all files under a fs path as a directory to push out - * - * @param srcDir src dir - * @param destRelativeDir dest dir (no trailing /) - * @return the map of entries - */ - public Map submitDirectory(Path srcDir, String destRelativeDir) throws IOException { - //now register each of the files in the directory to be - //copied to the destination - FileStatus[] fileset = fileSystem.listStatus(srcDir); - Map localResources = - new HashMap(fileset.length); - for (FileStatus entry : fileset) { - - LocalResource resource = createAmResource(entry.getPath(), - LocalResourceType.FILE); - String relativePath = destRelativeDir + "/" + entry.getPath().getName(); - localResources.put(relativePath, resource); - } - return localResources; - } - - /** - * Submit a JAR containing a specific class, returning - * the resource to be mapped in - * - * @param clazz class to look for - * @param subdir subdirectory (expected to end in a "/") - * @param jarName At the destination - * @return the local resource ref - * @throws IOException trouble copying to HDFS - */ - public LocalResource submitJarWithClass(Class clazz, Path tempPath, String subdir, String jarName) - throws IOException, SliderException { - File localFile = SliderUtils.findContainingJarOrFail(clazz); - return submitFile(localFile, tempPath, subdir, jarName); - } - - /** - * Submit a local file to the filesystem references by the instance's cluster - * filesystem - * - * @param localFile filename - * @param subdir subdirectory (expected to end in a "/") - * @param destFileName destination filename - * @return the local resource ref - * @throws IOException trouble copying to HDFS - */ - public LocalResource submitFile(File localFile, Path tempPath, String subdir, String destFileName) - throws IOException { - Path src = new Path(localFile.toString()); - Path subdirPath = new Path(tempPath, subdir); - fileSystem.mkdirs(subdirPath); - Path destPath = new Path(subdirPath, destFileName); - log.debug("Copying {} (size={} bytes) to {}", localFile, localFile.length(), destPath); - - fileSystem.copyFromLocalFile(false, true, src, destPath); - - // Set the type of resource - file or archive - // archives are untarred at destination - // we don't need the jar file to be untarred for now - return createAmResource(destPath, LocalResourceType.FILE); - } - - /** - * Submit the AM tar.gz resource referenced by the instance's cluster - * filesystem. Also, update the providerResources object with the new - * resource. 
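
Continuing the CoreFsDemo sketch, staging a local artifact for container localization with the methods above (file and directory names hypothetical):

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.yarn.api.records.LocalResource;

    Path tempPath = coreFs.getTempPathForCluster("my-service");
    // Copies /tmp/app.jar to <tempPath>/lib/app.jar and wraps it as a FILE
    // resource carrying the remote size and timestamp for NM validation.
    LocalResource jarRes = coreFs.submitFile(
        new File("/tmp/app.jar"), tempPath, "lib/", "app.jar");
    Map<String, LocalResource> localResources = new HashMap<>();
    localResources.put("lib/app.jar", jarRes);
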
- * - * @param providerResources - * the provider resource map to be updated - * @throws IOException - * trouble copying to HDFS - */ - public void submitTarGzipAndUpdate( - Map providerResources) throws IOException, - BadClusterStateException { - Path dependencyLibTarGzip = getDependencyTarGzip(); - LocalResource lc = createAmResource(dependencyLibTarGzip, - LocalResourceType.ARCHIVE); - providerResources.put(YarnServiceConstants.DEPENDENCY_LOCALIZED_DIR_LINK, lc); - } - - public void copyLocalFileToHdfs(File localPath, - Path destPath, FsPermission fp) - throws IOException { - if (localPath == null || destPath == null) { - throw new IOException("Either localPath or destPath is null"); - } - fileSystem.getConf().set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, - "000"); - fileSystem.mkdirs(destPath.getParent(), fp); - log.info("Copying file {} to {}", localPath.toURI(), - fileSystem.getScheme() + ":/" + destPath.toUri()); - - fileSystem.copyFromLocalFile(false, true, new Path(localPath.getPath()), - destPath); - // set file permissions of the destPath - fileSystem.setPermission(destPath, fp); - } - - public void copyHdfsFileToLocal(Path hdfsPath, File destFile) - throws IOException { - if (hdfsPath == null || destFile == null) { - throw new IOException("Either hdfsPath or destPath is null"); - } - log.info("Copying file {} to {}", hdfsPath.toUri(), destFile.toURI()); - - Path destPath = new Path(destFile.getPath()); - fileSystem.copyToLocalFile(hdfsPath, destPath); - } - - /** - * list entries in a filesystem directory - * - * @param path directory - * @return a listing, one to a line - * @throws IOException - */ - public String listFSDir(Path path) throws IOException { - FileStatus[] stats = fileSystem.listStatus(path); - StringBuilder builder = new StringBuilder(); - for (FileStatus stat : stats) { - builder.append(stat.getPath().toString()) - .append("\t") - .append(stat.getLen()) - .append("\n"); - } - return builder.toString(); - } - - public String cat(Path path) throws IOException { - FileStatus status = fileSystem.getFileStatus(path); - byte[] b = new byte[(int) status.getLen()]; - FSDataInputStream in = null; - try { - in = fileSystem.open(path); - int count = in.read(b); - return new String(b, 0, count, UTF_8); - } finally { - IOUtils.closeStream(in); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Duration.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Duration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Duration.java deleted file mode 100644 index 6fadfd3..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/Duration.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.utils; - -import java.io.Closeable; - -/** - * A duration in milliseconds. This class can be used - * to count time, and to be polled to see if a time limit has - * passed. - */ -public class Duration implements Closeable { - public long start, finish; - public final long limit; - - /** - * Create a duration instance with a limit of 0 - */ - public Duration() { - this(0); - } - - /** - * Create a duration with a limit specified in millis - * @param limit duration in milliseconds - */ - public Duration(long limit) { - this.limit = limit; - } - - /** - * Start - * @return self - */ - public Duration start() { - start = now(); - return this; - } - - /** - * The close operation relays to {@link #finish()}. - * Implementing it allows Duration instances to be automatically - * finish()'d in Java7 try blocks for when used in measuring durations. - */ - @Override - public final void close() { - finish(); - } - - public void finish() { - finish = now(); - } - - protected long now() { - return System.nanoTime()/1000000; - } - - public long getInterval() { - return finish - start; - } - - /** - * return true if the limit has been exceeded - * @return true if a limit was set and the current time - * exceeds it. - */ - public boolean getLimitExceeded() { - return limit >= 0 && ((now() - start) > limit); - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("Duration"); - if (finish >= start) { - builder.append(" finished at ").append(getInterval()).append(" millis;"); - } else { - if (start > 0) { - builder.append(" started but not yet finished;"); - } else { - builder.append(" unstarted;"); - } - } - if (limit > 0) { - builder.append(" limit: ").append(limit).append(" millis"); - if (getLimitExceeded()) { - builder.append(" - exceeded"); - } - } - return builder.toString(); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java deleted file mode 100644 index 7b22e3e..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.utils; - -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; -import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.JsonParseException; -import org.codehaus.jackson.map.DeserializationConfig; -import org.codehaus.jackson.map.JsonMappingException; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.PropertyNamingStrategy; -import org.codehaus.jackson.map.SerializationConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.EOFException; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * Support for marshalling objects to and from JSON. - * This class is NOT thread safe; it constructs an object mapper - * as an instance field. 
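
Given the warning above, a safe pattern is one JsonSerDeser per type, shared only within a single thread; ServiceSpec here is a hypothetical Jackson-mappable bean, and the class is assumed generic in T as its javadoc @param suggests:

    JsonSerDeser<ServiceSpec> serDeser =
        new JsonSerDeser<>(ServiceSpec.class);
    // Unknown JSON properties are ignored rather than failing the parse.
    ServiceSpec spec = serDeser.fromJson("{\"name\": \"my-service\"}");
    String json = serDeser.toJson(spec);  // INDENT_OUTPUT: pretty-printed
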
- * @param - */ -public class JsonSerDeser { - - private static final Logger log = LoggerFactory.getLogger(JsonSerDeser.class); - private static final String UTF_8 = "UTF-8"; - - private final Class classType; - private final ObjectMapper mapper; - - /** - * Create an instance bound to a specific type - * @param classType class type - */ - public JsonSerDeser(Class classType) { - this.classType = classType; - this.mapper = new ObjectMapper(); - mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false); - } - - public JsonSerDeser(Class classType, PropertyNamingStrategy namingStrategy) { - this(classType); - mapper.setPropertyNamingStrategy(namingStrategy); - } - - /** - * Convert from JSON - * @param json input - * @return the parsed JSON - * @throws IOException IO - * @throws JsonMappingException failure to map from the JSON to this class - */ - public T fromJson(String json) - throws IOException, JsonParseException, JsonMappingException { - try { - return mapper.readValue(json, classType); - } catch (IOException e) { - log.error("Exception while parsing json : " + e + "\n" + json, e); - throw e; - } - } - - /** - * Convert from a JSON file - * @param jsonFile input file - * @return the parsed JSON - * @throws IOException IO problems - * @throws JsonMappingException failure to map from the JSON to this class - */ - public T fromFile(File jsonFile) - throws IOException, JsonParseException, JsonMappingException { - File absoluteFile = jsonFile.getAbsoluteFile(); - try { - return mapper.readValue(absoluteFile, classType); - } catch (IOException e) { - log.error("Exception while parsing json file {}", absoluteFile, e); - throw e; - } - } - - /** - * Convert from a JSON file - * @param resource input file - * @return the parsed JSON - * @throws IOException IO problems - * @throws JsonMappingException failure to map from the JSON to this class - */ - public T fromResource(String resource) - throws IOException, JsonParseException, JsonMappingException { - try(InputStream resStream = this.getClass().getResourceAsStream(resource)) { - if (resStream == null) { - throw new FileNotFoundException(resource); - } - return (T) (mapper.readValue(resStream, classType)); - } catch (IOException e) { - log.error("Exception while parsing json resource {}", resource, e); - throw e; - } - } - - /** - * Convert from an input stream, closing the stream afterwards. - * @param stream - * @return the parsed JSON - * @throws IOException IO problems - */ - public T fromStream(InputStream stream) throws IOException { - try { - return (T) (mapper.readValue(stream, classType)); - } catch (IOException e) { - log.error("Exception while parsing json input stream", e); - throw e; - } finally { - IOUtils.closeStream(stream); - } - } - - /** - * clone by converting to JSON and back again. - * This is much less efficient than any Java clone process. - * @param instance instance to duplicate - * @return a new instance - * @throws IOException problems. 
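
Two more patterns, continuing the serDeser sketch above with the same hypothetical ServiceSpec:

    // Classpath load; FileNotFoundException when the resource is missing.
    ServiceSpec fromCp = serDeser.fromResource("/example-spec.json");
    // Deep copy by JSON round trip; slow, but needs no copy constructor.
    ServiceSpec copy = serDeser.fromInstance(fromCp);
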
- */ - public T fromInstance(T instance) throws IOException { - return fromJson(toJson(instance)); - } - - /** - * Deserialize from a byte array - * @param b - * @return the deserialized value - * @throws IOException parse problems - */ - public T fromBytes(byte[] b) throws IOException { - String json = new String(b, 0, b.length, UTF_8); - return fromJson(json); - } - - /** - * Load from a Hadoop filesystem - * @param fs filesystem - * @param path path - * @return a loaded CD - * @throws IOException IO problems - * @throws JsonParseException parse problems - * @throws JsonMappingException O/J mapping problems - */ - public T load(FileSystem fs, Path path) - throws IOException, JsonParseException, JsonMappingException { - FileStatus status = fs.getFileStatus(path); - long len = status.getLen(); - byte[] b = new byte[(int) len]; - FSDataInputStream dataInputStream = fs.open(path); - int count = dataInputStream.read(b); - if (count != len) { - throw new EOFException("Read of " + path +" finished prematurely"); - } - return fromBytes(b); - } - - - /** - * Save to a hadoop filesystem - * @param fs filesystem - * @param path path - * @param instance instance to save - * @param overwrite should any existing file be overwritten - * @throws IOException IO exception - */ - public void save(FileSystem fs, Path path, T instance, - boolean overwrite) throws - IOException { - FSDataOutputStream dataOutputStream = fs.create(path, overwrite); - writeJsonAsBytes(instance, dataOutputStream); - } - - /** - * Save an instance to a file - * @param instance instance to save - * @param file file - * @throws IOException - */ - public void save(T instance, File file) throws - IOException { - writeJsonAsBytes(instance, new FileOutputStream(file.getAbsoluteFile())); - } - - /** - * Write the json as bytes -then close the file - * @param dataOutputStream an outout stream that will always be closed - * @throws IOException on any failure - */ - private void writeJsonAsBytes(T instance, - OutputStream dataOutputStream) throws IOException { - try { - String json = toJson(instance); - byte[] b = json.getBytes(UTF_8); - dataOutputStream.write(b); - dataOutputStream.flush(); - dataOutputStream.close(); - } finally { - IOUtils.closeStream(dataOutputStream); - } - } - - /** - * Convert an object to a JSON string - * @param instance instance to convert - * @return a JSON string description - * @throws JsonParseException parse problems - * @throws JsonMappingException O/J mapping problems - */ - public String toJson(T instance) throws IOException, - JsonGenerationException, - JsonMappingException { - mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); - return mapper.writeValueAsString(instance); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java deleted file mode 100644 index c0712c3..0000000 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java +++ /dev/null @@ -1,680 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.utils; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.security.Credentials; -import org.apache.hadoop.security.SaslPropertiesResolver; -import org.apache.hadoop.security.SecurityUtil; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.util.ExitUtil; -import org.apache.hadoop.util.Shell; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.crypto.Cipher; -import java.io.Closeable; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.PrintStream; -import java.lang.reflect.InvocationTargetException; -import java.net.InetAddress; -import java.security.NoSuchAlgorithmException; -import java.util.Collection; -import java.util.Date; -import java.util.List; -import java.util.regex.Pattern; - -import static org.apache.hadoop.security.UserGroupInformation.*; -import static org.apache.hadoop.security.authentication.util.KerberosUtil.*; -import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*; - -/** - * Kerberos diagnostics - * At some point this may move to hadoop core, so please keep use of slider - * methods and classes to ~0. - * - * This operation expands some of the diagnostic output of the security code, - * but not all. 
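
The class javadoc below names two debug switches; as a sketch, the JVM-level flags can also be set programmatically before any login attempt (the log4j line is an assumption about the logging setup):

    // The same flags KerberosDiags toggles itself during execute():
    System.setProperty("sun.security.krb5.debug", "true");
    System.setProperty("sun.security.spnego.debug", "true");
    // Outside the JVM:
    //   export HADOOP_JAAS_DEBUG=true
    //   log4j.logger.org.apache.hadoop.security=DEBUG
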
For completeness - * - * Set the environment variable {@code HADOOP_JAAS_DEBUG=true} - * Set the log level for {@code org.apache.hadoop.security=DEBUG} - */ -public class KerberosDiags implements Closeable { - - private static final Logger LOG = LoggerFactory.getLogger(KerberosDiags.class); - public static final String KRB5_CCNAME = "KRB5CCNAME"; - public static final String JAVA_SECURITY_KRB5_CONF - = "java.security.krb5.conf"; - public static final String JAVA_SECURITY_KRB5_REALM - = "java.security.krb5.realm"; - public static final String SUN_SECURITY_KRB5_DEBUG - = "sun.security.krb5.debug"; - public static final String SUN_SECURITY_SPNEGO_DEBUG - = "sun.security.spnego.debug"; - public static final String SUN_SECURITY_JAAS_FILE - = "java.security.auth.login.config"; - public static final String KERBEROS_KINIT_COMMAND - = "hadoop.kerberos.kinit.command"; - public static final String HADOOP_AUTHENTICATION_IS_DISABLED - = "Hadoop authentication is disabled"; - public static final String UNSET = "(unset)"; - public static final String NO_DEFAULT_REALM = "Cannot locate default realm"; - - private final Configuration conf; - private final List services; - private final PrintStream out; - private final File keytab; - private final String principal; - private final long minKeyLength; - private final boolean securityRequired; - - public static final String CAT_JVM = "JVM"; - public static final String CAT_JAAS = "JAAS"; - public static final String CAT_CONFIG = "CONFIG"; - public static final String CAT_LOGIN = "LOGIN"; - public static final String CAT_KERBEROS = "KERBEROS"; - public static final String CAT_SASL = "SASL"; - - @SuppressWarnings("IOResourceOpenedButNotSafelyClosed") - public KerberosDiags(Configuration conf, - PrintStream out, - List services, - File keytab, - String principal, - long minKeyLength, - boolean securityRequired) { - this.conf = conf; - this.services = services; - this.keytab = keytab; - this.principal = principal; - this.out = out; - this.minKeyLength = minKeyLength; - this.securityRequired = securityRequired; - } - - @Override - public void close() throws IOException { - flush(); - } - - /** - * Execute diagnostics. - *
<p> - * Things it would be nice if UGI made accessible - * <ol> - *   <li>A way to enable JAAS debug programmatically</li> - *   <li>Access to the TGT</li> - * </ol>
- * @return true if security was enabled and all probes were successful - * @throws KerberosDiagsFailure explicitly raised failure - * @throws Exception other security problems - */ - @SuppressWarnings("deprecation") - public boolean execute() throws Exception { - - title("Kerberos Diagnostics scan at %s", - new Date(System.currentTimeMillis())); - - // check that the machine has a name - println("Hostname: %s", - InetAddress.getLocalHost().getCanonicalHostName()); - - // Fail fast on a JVM without JCE installed. - validateKeyLength(); - - // look at realm - println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName()); - printDefaultRealm(); - - title("System Properties"); - for (String prop : new String[]{ - JAVA_SECURITY_KRB5_CONF, - JAVA_SECURITY_KRB5_REALM, - SUN_SECURITY_KRB5_DEBUG, - SUN_SECURITY_SPNEGO_DEBUG, - SUN_SECURITY_JAAS_FILE - }) { - printSysprop(prop); - } - - title("Environment Variables"); - for (String env : new String[]{ - "HADOOP_JAAS_DEBUG", - KRB5_CCNAME, - "HADOOP_USER_NAME", - "HADOOP_PROXY_USER", - HADOOP_TOKEN_FILE_LOCATION, - }) { - printEnv(env); - } - - for (String prop : new String[]{ - KERBEROS_KINIT_COMMAND, - HADOOP_SECURITY_AUTHENTICATION, - HADOOP_SECURITY_AUTHORIZATION, - "hadoop.kerberos.min.seconds.before.relogin", // not in 2.6 - "hadoop.security.dns.interface", // not in 2.6 - "hadoop.security.dns.nameserver", // not in 2.6 - HADOOP_RPC_PROTECTION, - HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS, - HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX, - HADOOP_SECURITY_GROUP_MAPPING, - "hadoop.security.impersonation.provider.class", // not in 2.6 - "dfs.data.transfer.protection" // HDFS - }) { - printConfOpt(prop); - } - - // check that authentication is enabled - if (SecurityUtil.getAuthenticationMethod(conf) - .equals(AuthenticationMethod.SIMPLE)) { - println(HADOOP_AUTHENTICATION_IS_DISABLED); - failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED); - // no security, skip rest of test - return false; - } - - validateKrb5File(); - validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS); - validateSasl("dfs.data.transfer.saslproperties.resolver.class"); - validateKinitExecutable(); - validateJAAS(); - // now the big test: login, then try again - boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG); - boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG); - try { - title("Logging in"); - - if (keytab != null) { - dumpKeytab(keytab); - loginFromKeytab(); - } else { - UserGroupInformation loginUser = getLoginUser(); - dumpUGI("Log in user", loginUser); - validateUGI("Login user", loginUser); - println("Ticket based login: %b", isLoginTicketBased()); - println("Keytab based login: %b", isLoginKeytabBased()); - } - - return true; - } finally { - // restore original system properties - System.setProperty(SUN_SECURITY_KRB5_DEBUG, - Boolean.toString(krb5Debug)); - System.setProperty(SUN_SECURITY_SPNEGO_DEBUG, - Boolean.toString(spnegoDebug)); - } - } - - /** - * Fail fast on a JVM without JCE installed. - * - * This is a recurrent problem - * (that is: it keeps creeping back with JVM updates); - * a fast failure is the best tactic - * @throws NoSuchAlgorithmException - */ - - protected void validateKeyLength() throws NoSuchAlgorithmException { - int aesLen = Cipher.getMaxAllowedKeyLength("AES"); - println("Maximum AES encryption key length %d bits", aesLen); - failif (aesLen < minKeyLength, - CAT_JVM, - "Java Cryptography Extensions are not installed on this JVM." 
- +" Maximum supported key length %s - minimum required %d", - aesLen, minKeyLength); - } - - /** - * Get the default realm. - *
<p>
- * Not having a default realm may be harmless, so is noted at info. - * All other invocation failures are downgraded to warn, as - * follow-on actions may still work. - * failure to invoke the method via introspection is rejected, - * as it's a sign of JVM compatibility issues that may have other - * consequences - */ - protected void printDefaultRealm() { - try { - println("Default Realm = %s", - getDefaultRealm()); - } catch (ClassNotFoundException - | IllegalAccessException - | NoSuchMethodException e) { - - throw new KerberosDiagsFailure(CAT_JVM, e, - "Failed to invoke krb5.Config.getDefaultRealm: %s", e); - } catch (InvocationTargetException e) { - Throwable cause = e.getCause() != null ? e.getCause() : e; - if (cause.toString().contains(NO_DEFAULT_REALM)) { - // exception raised if there is no default realm. This is not - // always a problem, so downgrade to a message. - println("Host has no default realm"); - LOG.debug(cause.toString(), cause); - } else { - println("Kerberos.getDefaultRealm() failed: %s\n%s", - cause, - org.apache.hadoop.util.StringUtils.stringifyException(cause)); - } - } - } - - /** - * Locate the krb5.conf file and dump it. - * No-op on windows. - * @throws IOException - */ - private void validateKrb5File() throws IOException { - if (!Shell.WINDOWS) { - title("Locating Kerberos configuration file"); - String krbPath = "/etc/krb5.conf"; - String jvmKrbPath = System.getProperty(JAVA_SECURITY_KRB5_CONF); - if (jvmKrbPath != null) { - println("Setting kerberos path from sysprop %s: %s", - JAVA_SECURITY_KRB5_CONF, jvmKrbPath); - krbPath = jvmKrbPath; - } - - String krb5name = System.getenv(KRB5_CCNAME); - if (krb5name != null) { - println("Setting kerberos path from environment variable %s: %s", - KRB5_CCNAME, krb5name); - krbPath = krb5name; - if (jvmKrbPath != null) { - println("Warning - both %s and %s were set - %s takes priority", - JAVA_SECURITY_KRB5_CONF, KRB5_CCNAME, KRB5_CCNAME); - } - } - - File krbFile = new File(krbPath); - println("Kerberos configuration file = %s", krbFile); - failif(!krbFile.exists(), - CAT_KERBEROS, - "Kerberos configuration file %s not found", krbFile); - dump(krbFile); - } - } - - /** - * Dump a keytab: list all principals. - * @param keytabFile the keytab file - * @throws IOException IO problems - */ - public void dumpKeytab(File keytabFile) throws IOException { - title("Examining keytab %s", keytabFile); - File kt = keytabFile.getCanonicalFile(); - failif(!kt.exists(), CAT_CONFIG, "Keytab not found: %s", kt); - failif(!kt.isFile(), CAT_CONFIG, "Keytab is not a valid file: %s", kt); - - String[] names = getPrincipalNames(keytabFile.getCanonicalPath(), - Pattern.compile(".*")); - println("keytab entry count: %d", names.length); - for (String name : names) { - println(" %s", name); - } - println("-----"); - } - - /** - * Log in from a keytab, dump the UGI, validate it, then try and log in again. - * That second-time login catches JVM/Hadoop compatibility problems. 
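
A sketch of driving the full probe sequence through the constructor shown earlier; the keytab path, principal, and 256-bit minimum are hypothetical, and the services list is assumed to hold String names:

    import java.io.File;
    import java.util.Collections;
    import org.apache.hadoop.conf.Configuration;

    try (KerberosDiags diags = new KerberosDiags(
        new Configuration(),
        System.out,                       // PrintStream for the report
        Collections.<String>emptyList(),  // no extra services to probe
        new File("/etc/security/svc.keytab"),
        "svc/host@EXAMPLE.COM",
        256,                              // minimum AES key length
        true)) {                          // fail if security is disabled
      boolean ok = diags.execute();       // logs in, then logs in again
      System.out.println("kerberos ok: " + ok);
    }
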
- * @throws IOException - */ - private void loginFromKeytab() throws IOException { - UserGroupInformation ugi; - String identity; - if (keytab != null) { - File kt = keytab.getCanonicalFile(); - println("Using keytab %s principal %s", kt, principal); - identity = principal; - - failif(StringUtils.isEmpty(principal), CAT_KERBEROS, - "No principal defined"); - ugi = loginUserFromKeytabAndReturnUGI(principal, kt.getPath()); - dumpUGI(identity, ugi); - validateUGI(principal, ugi); - - title("Attempting to log in from keytab again"); - // package scoped -hence the reason why this class must be in the - // hadoop.security package - setShouldRenewImmediatelyForTests(true); - // attempt a new login - ugi.reloginFromKeytab(); - } else { - println("No keytab: logging is as current user"); - } - } - - /** - * Dump a UGI. - * @param title title of this section - * @param ugi UGI to dump - * @throws IOException - */ - private void dumpUGI(String title, UserGroupInformation ugi) - throws IOException { - title(title); - println("UGI instance = %s", ugi); - println("Has kerberos credentials: %b", ugi.hasKerberosCredentials()); - println("Authentication method: %s", ugi.getAuthenticationMethod()); - println("Real Authentication method: %s", - ugi.getRealAuthenticationMethod()); - title("Group names"); - for (String name : ugi.getGroupNames()) { - println(name); - } - title("Credentials"); - Credentials credentials = ugi.getCredentials(); - List secretKeys = credentials.getAllSecretKeys(); - title("Secret keys"); - if (!secretKeys.isEmpty()) { - for (Text secret: secretKeys) { - println("%s", secret); - } - } else { - println("(none)"); - } - - dumpTokens(ugi); - } - - /** - * Validate the UGI: verify it is kerberized. - * @param messagePrefix message in exceptions - * @param user user to validate - */ - private void validateUGI(String messagePrefix, UserGroupInformation user) { - failif(!user.hasKerberosCredentials(), - CAT_LOGIN, "%s: No kerberos credentials for %s", messagePrefix, user); - failif(user.getAuthenticationMethod() == null, - CAT_LOGIN, "%s: Null AuthenticationMethod for %s", messagePrefix, user); - } - - /** - * A cursory look at the {@code kinit} executable. - * If it is an absolute path: it must exist with a size > 0. - * If it is just a command, it has to be on the path. There's no check - * for that -but the PATH is printed out. - */ - private void validateKinitExecutable() { - String kinit = conf.getTrimmed(KERBEROS_KINIT_COMMAND, ""); - if (!kinit.isEmpty()) { - File kinitPath = new File(kinit); - println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath); - if (kinitPath.isAbsolute()) { - failif(!kinitPath.exists(), CAT_KERBEROS, - "%s executable does not exist: %s", - KERBEROS_KINIT_COMMAND, kinitPath); - failif(!kinitPath.isFile(), CAT_KERBEROS, - "%s path does not refer to a file: %s", - KERBEROS_KINIT_COMMAND, kinitPath); - failif(kinitPath.length() == 0, CAT_KERBEROS, - "%s file is empty: %s", - KERBEROS_KINIT_COMMAND, kinitPath); - } else { - println("Executable %s is relative -must be on the PATH", kinit); - printEnv("PATH"); - } - } - } - - /** - * Try to load the SASL resolver. 
-   * @param saslPropsResolverKey key for the SASL resolver
-   */
-  private void validateSasl(String saslPropsResolverKey) {
-    title("Resolving SASL property %s", saslPropsResolverKey);
-    String saslPropsResolver = conf.getTrimmed(saslPropsResolverKey);
-    try {
-      Class<? extends SaslPropertiesResolver> resolverClass = conf.getClass(
-          saslPropsResolverKey,
-          SaslPropertiesResolver.class, SaslPropertiesResolver.class);
-      println("Resolver is %s", resolverClass);
-    } catch (RuntimeException e) {
-      throw new KerberosDiagsFailure(CAT_SASL, e,
-          "Failed to load %s class %s",
-          saslPropsResolverKey, saslPropsResolver);
-    }
-  }
-
-  /**
-   * Validate any JAAS entry referenced in the {@link #SUN_SECURITY_JAAS_FILE}
-   * property.
-   */
-  private void validateJAAS() {
-    String jaasFilename = System.getProperty(SUN_SECURITY_JAAS_FILE);
-    if (jaasFilename != null) {
-      title("JAAS");
-      File jaasFile = new File(jaasFilename);
-      println("JAAS file is defined in %s: %s",
-          SUN_SECURITY_JAAS_FILE, jaasFile);
-      failif(!jaasFile.exists(), CAT_JAAS,
-          "JAAS file does not exist: %s", jaasFile);
-      failif(!jaasFile.isFile(), CAT_JAAS,
-          "Specified JAAS file is not a file: %s", jaasFile);
-    }
-  }
-
-  /**
-   * Dump all tokens of a user.
-   * @param user user
-   */
-  public void dumpTokens(UserGroupInformation user) {
-    Collection<Token<? extends TokenIdentifier>> tokens
-        = user.getCredentials().getAllTokens();
-    title("Token Count: %d", tokens.size());
-    for (Token<? extends TokenIdentifier> token : tokens) {
-      println("Token %s", token.getKind());
-    }
-  }
-
-  /**
-   * Set the system property to true; return the old value for caching.
-   * @param sysprop property
-   * @return the previous value
-   */
-  private boolean getAndSet(String sysprop) {
-    boolean old = Boolean.getBoolean(sysprop);
-    System.setProperty(sysprop, "true");
-    return old;
-  }
-
-  /**
-   * Flush all active output channels, including {@code System.err},
-   * so as to stay in sync with any JRE log messages.
-   */
-  private void flush() {
-    if (out != null) {
-      out.flush();
-    } else {
-      System.out.flush();
-    }
-    System.err.flush();
-  }
-
-  /**
-   * Format and print a line of output.
-   * This goes to any output file, or
-   * is logged at info. The output is flushed before and after, to
-   * try and stay in sync with JRE logging.
-   * @param format format string
-   * @param args any arguments
-   */
-  @VisibleForTesting
-  public void println(String format, Object... args) {
-    println(format(format, args));
-  }
-
-  /**
-   * Print a line of output. This goes to any output file, or
-   * is logged at info. The output is flushed before and after, to
-   * try and stay in sync with JRE logging.
-   * @param msg message string
-   */
-  @VisibleForTesting
-  private void println(String msg) {
-    flush();
-    if (out != null) {
-      out.println(msg);
-    } else {
-      LOG.info(msg);
-    }
-    flush();
-  }
-
-  /**
-   * Print a title entry.
-   * @param format format string
-   * @param args any arguments
-   */
-  private void title(String format, Object... args) {
-    println("");
-    println("");
-    String msg = "== " + format(format, args) + " ==";
-    println(msg);
-    println("");
-  }
-
-  /**
-   * Print a system property, or {@link #UNSET} if unset.
-   * @param property property to print
-   */
-  private void printSysprop(String property) {
-    println("%s = \"%s\"", property,
-        System.getProperty(property, UNSET));
-  }
-
-  /**
-   * Print a configuration option, or {@link #UNSET} if unset.
-   * @param option option to print
-   */
-  private void printConfOpt(String option) {
-    println("%s = \"%s\"", option, conf.get(option, UNSET));
-  }
-
-  /**
-   * Print an environment variable's name and value; printing
-   * {@link #UNSET} if it is not set.
-   * @param variable environment variable
-   */
-  private void printEnv(String variable) {
-    String env = System.getenv(variable);
-    println("%s = \"%s\"", variable, env != null ? env : UNSET);
-  }
-
-  /**
-   * Dump any file to standard out; add a trailing newline.
-   * @param file file to dump
-   * @throws IOException IO problems
-   */
-  public void dump(File file) throws IOException {
-    try (FileInputStream in = new FileInputStream(file)) {
-      for (String line : IOUtils.readLines(in)) {
-        println("%s", line);
-      }
-    }
-    println("");
-  }
-
-  /**
-   * Format and raise a failure.
-   *
-   * @param category category for exception
-   * @param message string formatting message
-   * @param args any arguments for the formatting
-   * @throws KerberosDiagsFailure containing the formatted text
-   */
-  private void fail(String category, String message, Object... args)
-      throws KerberosDiagsFailure {
-    throw new KerberosDiagsFailure(category, message, args);
-  }
-
-  /**
-   * Conditional failure with string formatted arguments.
-   * @param condition failure condition
-   * @param category category for exception
-   * @param message string formatting message
-   * @param args any arguments for the formatting
-   * @throws KerberosDiagsFailure containing the formatted text
-   * if the condition was met
-   */
-  private void failif(boolean condition,
-      String category,
-      String message,
-      Object... args)
-      throws KerberosDiagsFailure {
-    if (condition) {
-      fail(category, message, args);
-    }
-  }
-
-  /**
-   * Format a string, treating a call where there are no varargs values
-   * as a string to pass through unformatted.
-   * @param message message, which is either a format string + args, or
-   * a general string
-   * @param args argument array
-   * @return a string for printing.
-   */
-  public static String format(String message, Object... args) {
-    if (args.length == 0) {
-      return message;
-    } else {
-      return String.format(message, args);
-    }
-  }
-
-  /**
-   * Diagnostics failures return the exit code 41, "unauthorized".
-   *
-   * They have a category, initially for testing: the category can be
-   * validated without having to match on the entire string.
-   */
-  public static class KerberosDiagsFailure extends ExitUtil.ExitException {
-    private final String category;
-
-    public KerberosDiagsFailure(String category, String message) {
-      super(41, category + ": " + message);
-      this.category = category;
-    }
-
-    public KerberosDiagsFailure(String category, String message, Object... args) {
-      this(category, format(message, args));
-    }
-
-    public KerberosDiagsFailure(String category, Throwable throwable,
-        String message, Object... args) {
-      this(category, message, args);
-      initCause(throwable);
-    }
-
-    public String getCategory() {
-      return category;
-    }
-  }
-}
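A side note on the format() helper deleted above: it deliberately bypasses String.format() when no varargs are supplied, so diagnostic messages containing literal '%' characters survive untouched. A minimal standalone sketch of that contract (the demo class name is invented; the diff does not show the enclosing class declaration):

    // Standalone illustration of the pass-through formatting contract above.
    public class FormatContractDemo {
      public static String format(String message, Object... args) {
        // No varargs: treat the message as literal text, even if it contains '%'.
        return args.length == 0 ? message : String.format(message, args);
      }
      public static void main(String[] args) {
        System.out.println(format("disk 100% full"));                    // printed verbatim
        System.out.println(format("Default Realm = %s", "EXAMPLE.COM")); // formatted
      }
    }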
http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
deleted file mode 100644
index 6efa880..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import java.util.regex.Pattern;
-
-/**
- * Utility class to validate strings against a predefined pattern.
- */
-public class PatternValidator {
-
-  public static final String E_INVALID_NAME =
-      "Invalid name %s does not match the pattern %s ";
-  private final Pattern valid;
-  private final String pattern;
-
-  public PatternValidator(String pattern) {
-    this.pattern = pattern;
-    valid = Pattern.compile(pattern);
-  }
-
-  /**
-   * Validate the name - restricting it to the set defined in the pattern.
-   * @param name name to validate
-   * @throws IllegalArgumentException if not a valid name
-   */
-  public void validate(String name) {
-    if (!matches(name)) {
-      throw new IllegalArgumentException(
-          String.format(E_INVALID_NAME, name, pattern));
-    }
-  }
-
-  /**
-   * Query to see if the pattern matches.
-   * @param name name to validate
-   * @return true if the string matches the pattern
-   */
-  public boolean matches(String name) {
-    return valid.matcher(name).matches();
-  }
-}
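A quick usage sketch of the validator above (the regex is an assumption, chosen to resemble a DNS-friendly service-name rule; it is not taken from this diff):

    import org.apache.hadoop.yarn.service.utils.PatternValidator;

    public class PatternValidatorDemo {
      public static void main(String[] args) {
        // Hypothetical pattern: lowercase alphanumeric names with hyphens.
        PatternValidator validator = new PatternValidator("[a-z][a-z0-9-]*");
        validator.validate("my-service");                   // valid: returns silently
        System.out.println(validator.matches("Bad_Name"));  // false - query only, no exception
      }
    }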
http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
deleted file mode 100644
index 2dbf37f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.service.utils;
-
-import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
-import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
-import org.apache.hadoop.yarn.service.exceptions.SliderException;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * A scanner which can take an input string for a port range, or scan the lot.
- */
-public class PortScanner {
-  private static Pattern NUMBER_RANGE =
-      Pattern.compile("^(\\d+)\\s*-\\s*(\\d+)$");
-  private static Pattern SINGLE_NUMBER = Pattern.compile("^\\d+$");
-
-  private List<Integer> remainingPortsToCheck;
-
-  public PortScanner() {
-  }
-
-  public void setPortRange(String input) throws BadConfigException {
-    // first split based on commas
-    Set<Integer> inputPorts = new TreeSet<>();
-    String[] ranges = input.split(",");
-    for (String range : ranges) {
-      if (range.trim().isEmpty()) {
-        continue;
-      }
-      Matcher m = SINGLE_NUMBER.matcher(range.trim());
-      if (m.find()) {
-        inputPorts.add(Integer.parseInt(m.group()));
-        continue;
-      }
-      m = NUMBER_RANGE.matcher(range.trim());
-      if (m.find()) {
-        String[] boundaryValues = m.group(0).split("-");
-        int start = Integer.parseInt(boundaryValues[0].trim());
-        int end = Integer.parseInt(boundaryValues[1].trim());
-        if (end < start) {
-          throw new BadConfigException("End of port range is before start: "
-              + range + " in input: " + input);
-        }
-        for (int i = start; i < end + 1; i++) {
-          inputPorts.add(i);
-        }
-        continue;
-      }
-      throw new BadConfigException("Bad port range: " + range + " in input: "
-          + input);
-    }
-    if (inputPorts.size() == 0) {
-      throw new BadConfigException("No ports found in range: " + input);
-    }
-    this.remainingPortsToCheck = new ArrayList<>(inputPorts);
-  }
-
-  public List<Integer> getRemainingPortsToCheck() {
-    return remainingPortsToCheck;
-  }
-
-  public int getAvailablePort() throws SliderException, IOException {
-    if (remainingPortsToCheck != null) {
-      return getAvailablePortViaPortArray();
-    } else {
-      return SliderUtils.getOpenPort();
-    }
-  }
-
-  private int getAvailablePortViaPortArray() throws SliderException {
-    boolean found = false;
-    int availablePort = -1;
-    Iterator<Integer> portsToCheck = this.remainingPortsToCheck.iterator();
-    while (portsToCheck.hasNext() && !found) {
-      int portToCheck = portsToCheck.next();
-      found = SliderUtils.isPortAvailable(portToCheck);
-      if (found) {
-        availablePort = portToCheck;
-        portsToCheck.remove();
-      }
-    }
-
-    if (availablePort < 0) {
-      throw new SliderException(SliderExitCodes.EXIT_BAD_CONFIGURATION,
-          "No available ports found in configured range {}",
-          remainingPortsToCheck);
-    }
-
-    return availablePort;
-  }
-}
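A minimal usage sketch of the PortScanner above (the range string is invented; its comma and N-M syntax comes from setPortRange()):

    import org.apache.hadoop.yarn.service.utils.PortScanner;

    public class PortScannerDemo {
      public static void main(String[] args) throws Exception {
        PortScanner scanner = new PortScanner();
        // Hypothetical range: individual ports and N-M spans, comma separated.
        scanner.setPortRange("5000-5003, 5010");
        // Returns the first port in {5000..5003, 5010} that is free right now,
        // and removes it from the remaining set so it is not handed out twice.
        int port = scanner.getAvailablePort();
        System.out.println("allocated port " + port);
      }
    }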
http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
deleted file mode 100644
index 9d00b3c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
-import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-import java.io.IOException;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * JSON-serializable description of a published key-val configuration.
- *
- * The values themselves are not serialized in the external view; they have
- * to be served up by the far end.
- */
-@JsonIgnoreProperties(ignoreUnknown = true)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-public class PublishedConfiguration {
-
-  public String description;
-  public long updated;
-
-  public String updatedTime;
-
-  public Map<String, String> entries = new HashMap<>();
-
-  public PublishedConfiguration() {
-  }
-
-  /**
-   * Build an empty published configuration.
-   * @param description configuration description
-   */
-  public PublishedConfiguration(String description) {
-    this.description = description;
-  }
-
-  /**
-   * Build a configuration from the entries.
-   * @param description configuration description
-   * @param entries entries to put
-   */
-  public PublishedConfiguration(String description,
-      Iterable<Map.Entry<String, String>> entries) {
-    this.description = description;
-    putValues(entries);
-  }
-
-  /**
-   * Build a published configuration, using the keys from keysource,
-   * but resolving the values from the value source, via Configuration.get().
-   * @param description configuration description
-   * @param keysource source of keys
-   * @param valuesource source of values
-   */
-  public PublishedConfiguration(String description,
-      Iterable<Map.Entry<String, String>> keysource,
-      Configuration valuesource) {
-    this.description = description;
-    putValues(ConfigHelper.resolveConfiguration(keysource, valuesource));
-  }
-
-
-  /**
-   * Is the configuration empty? This means either that it has not
-   * been given any values, or it is a stripped-down copy sent over the
-   * wire.
-   * @return true if it is empty
-   */
-  public boolean isEmpty() {
-    return entries.isEmpty();
-  }
-
-
-  public void setUpdated(long updated) {
-    this.updated = updated;
-    this.updatedTime = new Date(updated).toString();
-  }
-
-  public long getUpdated() {
-    return updated;
-  }
-
-  /**
-   * Set the values from an iterable (this includes a Hadoop Configuration
-   * and Java properties object).
-   * Any existing value set is discarded.
-   * @param entries entries to put
-   */
-  public void putValues(Iterable<Map.Entry<String, String>> entries) {
-    this.entries = new HashMap<>();
-    for (Map.Entry<String, String> entry : entries) {
-      this.entries.put(entry.getKey(), entry.getValue());
-    }
-
-  }
-
-  /**
-   * Convert to Hadoop XML.
-   * @return the configuration as a Hadoop Configuration
-   */
-  public Configuration asConfiguration() {
-    Configuration conf = new Configuration(false);
-    try {
-      ConfigHelper.addConfigMap(conf, entries, "");
-    } catch (BadConfigException e) {
-      // triggered on a null value; switch to a runtime (and discard the stack)
-      throw new RuntimeException(e.toString());
-    }
-    return conf;
-  }
-
-  public String asConfigurationXML() throws IOException {
-    return ConfigHelper.toXml(asConfiguration());
-  }
-
-  /**
-   * Convert values to properties.
-   * @return a property file
-   */
-  public Properties asProperties() {
-    Properties props = new Properties();
-    props.putAll(entries);
-    return props;
-  }
-
-  /**
-   * Return the values as a JSON string.
-   * @return the JSON representation
-   * @throws IOException marshalling failure
-   */
-  public String asJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
-    String json = mapper.writeValueAsString(entries);
-    return json;
-  }
-
-
-  /**
-   * This makes a copy without the nested content - so is suitable
-   * for returning as part of the list of a parent's values.
-   * @return the copy
-   */
-  public PublishedConfiguration shallowCopy() {
-    PublishedConfiguration that = new PublishedConfiguration();
-    that.description = this.description;
-    that.updated = this.updated;
-    that.updatedTime = this.updatedTime;
-    return that;
-  }
-
-  @Override
-  public String toString() {
-    final StringBuilder sb =
-        new StringBuilder("PublishedConfiguration{");
-    sb.append("description='").append(description).append('\'');
-    sb.append(" entries = ").append(entries.size());
-    sb.append('}');
-    return sb.toString();
-  }
-
-  /**
-   * Create an outputter for a given format.
-   * @param format format to use
-   * @return an instance of output
-   */
-  public PublishedConfigurationOutputter createOutputter(ConfigFormat format) {
-    return PublishedConfigurationOutputter.createOutputter(format, this);
-  }
-}
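A short usage sketch of the class above (keys, values and the description string are invented for illustration):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;

    public class PublishedConfigurationDemo {
      public static void main(String[] args) throws Exception {
        Map<String, String> values = new HashMap<>();
        values.put("a.b.c", "1");
        PublishedConfiguration pub =
            new PublishedConfiguration("demo config", values.entrySet());
        pub.setUpdated(System.currentTimeMillis());
        System.out.println(pub.asJson());       // indented JSON of the entries map
        System.out.println(pub.shallowCopy());  // copy carries metadata, not entries
      }
    }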
http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
deleted file mode 100644
index 88ecf2c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import com.google.common.base.Charsets;
-import com.google.common.base.Preconditions;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
-import org.yaml.snakeyaml.DumperOptions;
-import org.yaml.snakeyaml.DumperOptions.FlowStyle;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.util.Properties;
-
-/**
- * Output a published configuration.
- */
-public abstract class PublishedConfigurationOutputter {
-
-  private static final String COMMENTS = "Generated by Apache Slider";
-
-  protected final PublishedConfiguration owner;
-
-  protected PublishedConfigurationOutputter(PublishedConfiguration owner) {
-    this.owner = owner;
-  }
-
-  /**
-   * Save the config to a destination file, in the format of this outputter.
-   * @param dest destination file
-   * @throws IOException IO problems
-   */
-/* JDK7
-  public void save(File dest) throws IOException {
-    try(FileOutputStream out = new FileOutputStream(dest)) {
-      save(out);
-      out.close();
-    }
-  }
-*/
-  public void save(File dest) throws IOException {
-    FileUtils.writeStringToFile(dest, asString(), Charsets.UTF_8);
-  }
-
-  /**
-   * Save the content.
-   * The default saves the asString() value
-   * to the output stream.
-   * @param out output stream
-   * @throws IOException IO problems
-   */
-  public void save(OutputStream out) throws IOException {
-    IOUtils.write(asString(), out, Charsets.UTF_8);
-  }
-
-  /**
-   * Convert to a string.
-   * @return the string form
-   * @throws IOException IO problems
-   */
-  public abstract String asString() throws IOException;
-
-  /**
-   * Create an outputter for the chosen format.
-   * @param format format enumeration
-   * @param owner owning config
-   * @return the outputter
-   */
-
-  public static PublishedConfigurationOutputter createOutputter(ConfigFormat format,
-      PublishedConfiguration owner) {
-    Preconditions.checkNotNull(owner);
-    switch (format) {
-    case XML:
-    case HADOOP_XML:
-      return new XmlOutputter(owner);
-    case PROPERTIES:
-      return new PropertiesOutputter(owner);
-    case JSON:
-      return new JsonOutputter(owner);
-    case ENV:
-      return new EnvOutputter(owner);
-    case TEMPLATE:
-      return new TemplateOutputter(owner);
-    case YAML:
-      return new YamlOutputter(owner);
-    default:
-      throw new RuntimeException("Unsupported format: " + format);
-    }
-  }
-
-  public static class XmlOutputter extends PublishedConfigurationOutputter {
-
-
-    private final Configuration configuration;
-
-    public XmlOutputter(PublishedConfiguration owner) {
-      super(owner);
-      configuration = owner.asConfiguration();
-    }
-
-    @Override
-    public void save(OutputStream out) throws IOException {
-      configuration.writeXml(out);
-    }
-
-    @Override
-    public String asString() throws IOException {
-      return ConfigHelper.toXml(configuration);
-    }
-
-    public Configuration getConfiguration() {
-      return configuration;
-    }
-  }
-
-  public static class PropertiesOutputter extends PublishedConfigurationOutputter {
-
-    private final Properties properties;
-
-    public PropertiesOutputter(PublishedConfiguration owner) {
-      super(owner);
-      properties = owner.asProperties();
-    }
-
-    @Override
-    public void save(OutputStream out) throws IOException {
-      properties.store(out, COMMENTS);
-    }
-
-
-    public String asString() throws IOException {
-      StringWriter sw = new StringWriter();
-      properties.store(sw, COMMENTS);
-      return sw.toString();
-    }
-  }
-
-
-  public static class JsonOutputter extends PublishedConfigurationOutputter {
-
-    public JsonOutputter(PublishedConfiguration owner) {
-      super(owner);
-    }
-
-    @Override
-    public String asString() throws IOException {
-      return owner.asJson();
-    }
-  }
-
-
-  public static class EnvOutputter extends PublishedConfigurationOutputter {
-
-    public EnvOutputter(PublishedConfiguration owner) {
-      super(owner);
-    }
-
-    @Override
-    public String asString() throws IOException {
-      if (!owner.entries.containsKey("content")) {
-        throw new IOException("Configuration has no content field and cannot " +
-            "be retrieved as type 'env'");
-      }
-      String content = owner.entries.get("content");
-      return ConfigUtils.replaceProps(owner.entries, content);
-    }
-  }
-
-  public static class TemplateOutputter extends EnvOutputter {
-    public TemplateOutputter(PublishedConfiguration owner) {
-      super(owner);
-    }
-  }
-
-  public static class YamlOutputter extends PublishedConfigurationOutputter {
-
-    private final Yaml yaml;
-
-    public YamlOutputter(PublishedConfiguration owner) {
-      super(owner);
-      DumperOptions options = new DumperOptions();
-      options.setDefaultFlowStyle(FlowStyle.BLOCK);
-      yaml = new Yaml(options);
-    }
-
-    public String asString() throws IOException {
-      return yaml.dump(owner.entries);
-    }
-  }
-
-}
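A minimal sketch of the factory path through the outputter above (the description string and target file are invented; ConfigFormat.JSON comes from the switch statement in createOutputter()):

    import java.io.File;
    import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
    import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
    import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;

    public class OutputterDemo {
      public static void main(String[] args) throws Exception {
        PublishedConfiguration pub = new PublishedConfiguration("demo");
        // Pick the JSON outputter via the factory; other formats switch the subclass.
        PublishedConfigurationOutputter outputter =
            pub.createOutputter(ConfigFormat.JSON);
        System.out.println(outputter.asString());  // JSON form of the entries
        outputter.save(new File("demo.json"));     // same content written to disk
      }
    }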
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org