From: jianhe@apache.org
To: common-commits@hadoop.apache.org
Date: Wed, 13 Sep 2017 23:33:37 -0000
Message-Id: <1c64e3c4c8f24b5eb075376aaa7c2fe4@git.apache.org>
Subject: [50/82] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java deleted file mode 100644 index 83c9961..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.hadoop.yarn.service.provider; - -import org.apache.hadoop.yarn.service.provider.defaultImpl.DefaultProviderFactory; -import org.apache.hadoop.yarn.service.api.records.Artifact; -import org.apache.hadoop.yarn.service.provider.docker.DockerProviderFactory; -import org.apache.hadoop.yarn.service.provider.tarball.TarballProviderFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Base class for factories. - */ -public abstract class ProviderFactory { - protected static final Logger LOG = - LoggerFactory.getLogger(ProviderFactory.class); - - protected ProviderFactory() {} - - public abstract AbstractClientProvider createClientProvider(); - - public abstract ProviderService createServerProvider(); - - public static synchronized ProviderService getProviderService(Artifact - artifact) { - return createSliderProviderFactory(artifact).createServerProvider(); - } - - public static synchronized AbstractClientProvider getClientProvider(Artifact - artifact) { - return createSliderProviderFactory(artifact).createClientProvider(); - } - - /** - * Create a provider for a specific application - * @param artifact artifact - * @return provider factory - */ - public static synchronized ProviderFactory createSliderProviderFactory( - Artifact artifact) { - if (artifact == null || artifact.getType() == null) { - LOG.debug("Loading service provider type default"); - return DefaultProviderFactory.getInstance(); - } - LOG.debug("Loading service provider type {}", artifact.getType()); - switch (artifact.getType()) { - // TODO add handling for custom types? - // TODO handle application - case DOCKER: - return DockerProviderFactory.getInstance(); - case TARBALL: - return TarballProviderFactory.getInstance(); - default: - throw new IllegalArgumentException(String.format("Resolution error, " + - "%s should not be passed to createSliderProviderFactory", - artifact.getType())); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java deleted file mode 100644 index 9ef0176..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
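Note: the dispatch in ProviderFactory.createSliderProviderFactory above reduces to a null check followed by a switch over the artifact type. A minimal, self-contained sketch of that pattern is below; the enum values and class name are illustrative only, while the real code maps Artifact.TypeEnum to DefaultProviderFactory, DockerProviderFactory or TarballProviderFactory.

// Illustrative sketch of the provider-selection pattern; not part of the patch.
public class ProviderDispatchSketch {

  enum ArtifactType { DOCKER, TARBALL }

  static String selectProvider(ArtifactType type) {
    if (type == null) {
      // no artifact type configured -> fall back to the default provider
      return "default";
    }
    switch (type) {
    case DOCKER:
      return "docker";
    case TARBALL:
      return "tarball";
    default:
      throw new IllegalArgumentException("Unexpected artifact type: " + type);
    }
  }

  public static void main(String[] args) {
    System.out.println(selectProvider(null));                // default
    System.out.println(selectProvider(ArtifactType.DOCKER)); // docker
  }
}
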
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.provider; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.service.api.records.Application; -import org.apache.hadoop.yarn.service.utils.SliderFileSystem; -import org.apache.hadoop.yarn.service.exceptions.SliderException; -import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher; -import org.apache.hadoop.yarn.service.compinstance.ComponentInstance; - -import java.io.IOException; - -public interface ProviderService { - - /** - * Set up the entire container launch context - */ - void buildContainerLaunchContext(AbstractLauncher containerLauncher, - Application application, ComponentInstance instance, - SliderFileSystem sliderFileSystem, Configuration yarnConf) - throws IOException, SliderException; -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java deleted file mode 100644 index a044838..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java +++ /dev/null @@ -1,402 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.service.provider; - -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.api.records.LocalResourceType; -import org.apache.hadoop.yarn.service.ServiceContext; -import org.apache.hadoop.yarn.service.api.records.Application; -import org.apache.hadoop.yarn.service.api.records.Component; -import org.apache.hadoop.yarn.service.api.records.ConfigFile; -import org.apache.hadoop.yarn.service.api.records.ConfigFormat; -import org.apache.hadoop.yarn.service.api.records.Configuration; -import org.apache.hadoop.yarn.service.compinstance.ComponentInstance; -import org.apache.hadoop.yarn.service.conf.YarnServiceConstants; -import org.apache.hadoop.yarn.service.conf.YarnServiceConf; -import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher; -import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException; -import org.apache.hadoop.yarn.service.exceptions.SliderException; -import org.apache.hadoop.yarn.service.utils.PublishedConfiguration; -import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter; -import org.apache.hadoop.yarn.service.utils.SliderFileSystem; -import org.apache.hadoop.yarn.service.utils.SliderUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.OutputStream; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.regex.Pattern; - -import static org.apache.hadoop.yarn.service.api.constants.ServiceApiConstants.*; - -/** - * This is a factoring out of methods handy for providers. It's bonded to a log - * at construction time. - */ -public class ProviderUtils implements YarnServiceConstants { - - protected static final Logger log = - LoggerFactory.getLogger(ProviderUtils.class); - - - /** - * Add oneself to the classpath. This does not work - * on minicluster test runs where the JAR is not built up. - * @param providerResources map of provider resources to add these entries to - * @param providerClass provider to add - * @param jarName name of the jar to use - * @param sliderFileSystem target filesystem - * @param tempPath path in the cluster FS for temp files - * @param libdir relative directory to place resources - * @param miniClusterTestRun true if minicluster is being used - * @return true if the class was found in a JAR - * - * @throws FileNotFoundException if the JAR was not found and this is NOT - * a mini cluster test run - * @throws IOException IO problems - * @throws SliderException any Slider problem - */ - public static boolean addProviderJar( - Map providerResources, - Class providerClass, - String jarName, - SliderFileSystem sliderFileSystem, - Path tempPath, - String libdir, - boolean miniClusterTestRun) throws - IOException, - SliderException { - try { - SliderUtils.putJar(providerResources, - sliderFileSystem, - providerClass, - tempPath, - libdir, - jarName); - return true; - } catch (FileNotFoundException e) { - if (miniClusterTestRun) { - return false; - } else { - throw e; - } - } - } - - /** - * Loads all dependency jars from the default path. 
- * @param providerResources map of provider resources to add these entries to - * @param sliderFileSystem target filesystem - * @param tempPath path in the cluster FS for temp files - * @param libDir relative directory to place resources - * @param libLocalSrcDir explicitly supplied local libs dir - * @throws IOException trouble copying to HDFS - * @throws SliderException trouble copying to HDFS - */ - public static void addAllDependencyJars( - Map providerResources, - SliderFileSystem sliderFileSystem, - Path tempPath, - String libDir, - String libLocalSrcDir) - throws IOException, SliderException { - if (SliderUtils.isSet(libLocalSrcDir)) { - File file = new File(libLocalSrcDir); - if (!file.exists() || !file.isDirectory()) { - throw new BadCommandArgumentsException( - "Supplied lib src dir %s is not valid", libLocalSrcDir); - } - } - SliderUtils.putAllJars(providerResources, sliderFileSystem, tempPath, - libDir, libLocalSrcDir); - } - - public static String substituteStrWithTokens(String content, - Map tokensForSubstitution) { - for (Map.Entry token : tokensForSubstitution.entrySet()) { - content = - content.replaceAll(Pattern.quote(token.getKey()), token.getValue()); - } - return content; - } - - // configs will be substituted by corresponding env in tokenMap - public static void substituteMapWithTokens(Map configs, - Map tokenMap) { - for (Map.Entry entry : configs.entrySet()) { - String value = entry.getValue(); - if (tokenMap != null) { - for (Map.Entry token : tokenMap.entrySet()) { - value = - value.replaceAll(Pattern.quote(token.getKey()), token.getValue()); - } - } - entry.setValue(value); - } - } - - /** - * Localize the service keytabs for the application. - * @param launcher container launcher - * @param fileSystem file system - * @throws IOException trouble uploading to HDFS - */ - public void localizeServiceKeytabs(AbstractLauncher launcher, - SliderFileSystem fileSystem, Application application) throws IOException { - - Configuration conf = application.getConfiguration(); - String keytabPathOnHost = - conf.getProperty(YarnServiceConf.KEY_AM_KEYTAB_LOCAL_PATH); - if (SliderUtils.isUnset(keytabPathOnHost)) { - String amKeytabName = - conf.getProperty(YarnServiceConf.KEY_AM_LOGIN_KEYTAB_NAME); - String keytabDir = - conf.getProperty(YarnServiceConf.KEY_HDFS_KEYTAB_DIR); - // we need to localize the keytab files in the directory - Path keytabDirPath = fileSystem.buildKeytabPath(keytabDir, null, - application.getName()); - boolean serviceKeytabsDeployed = false; - if (fileSystem.getFileSystem().exists(keytabDirPath)) { - FileStatus[] keytabs = fileSystem.getFileSystem().listStatus( - keytabDirPath); - LocalResource keytabRes; - for (FileStatus keytab : keytabs) { - if (!amKeytabName.equals(keytab.getPath().getName()) - && keytab.getPath().getName().endsWith(".keytab")) { - serviceKeytabsDeployed = true; - log.info("Localizing keytab {}", keytab.getPath().getName()); - keytabRes = fileSystem.createAmResource(keytab.getPath(), - LocalResourceType.FILE); - launcher.addLocalResource(KEYTAB_DIR + "/" + - keytab.getPath().getName(), - keytabRes); - } - } - } - if (!serviceKeytabsDeployed) { - log.warn("No service keytabs for the application have been localized. " - + "If the application requires keytabs for secure operation, " - + "please ensure that the required keytabs have been uploaded " - + "to the folder {}", keytabDirPath); - } - } - } - - // 1. Create all config files for a component on hdfs for localization - // 2. 
Add the config file to localResource - public static synchronized void createConfigFileAndAddLocalResource( - AbstractLauncher launcher, SliderFileSystem fs, Component component, - Map tokensForSubstitution, ComponentInstance instance, - ServiceContext context) throws IOException { - Path compDir = - new Path(new Path(fs.getAppDir(), "components"), component.getName()); - Path compInstanceDir = - new Path(compDir, instance.getCompInstanceName()); - if (!fs.getFileSystem().exists(compInstanceDir)) { - log.info(instance.getCompInstanceId() + ": Creating dir on hdfs: " + compInstanceDir); - fs.getFileSystem().mkdirs(compInstanceDir, - new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE)); - instance.setCompInstanceDir(compInstanceDir); - } else { - log.info("Component instance conf dir already exists: " + compInstanceDir); - } - - if (log.isDebugEnabled()) { - log.debug("Tokens substitution for component instance: " + instance - .getCompInstanceName() + System.lineSeparator() - + tokensForSubstitution); - } - - for (ConfigFile originalFile : component.getConfiguration().getFiles()) { - ConfigFile configFile = originalFile.copy(); - String fileName = new Path(configFile.getDestFile()).getName(); - - // substitute file name - for (Map.Entry token : tokensForSubstitution.entrySet()) { - configFile.setDestFile(configFile.getDestFile() - .replaceAll(Pattern.quote(token.getKey()), token.getValue())); - } - - Path remoteFile = new Path(compInstanceDir, fileName); - if (!fs.getFileSystem().exists(remoteFile)) { - log.info("Saving config file on hdfs for component " + instance - .getCompInstanceName() + ": " + configFile); - - if (configFile.getSrcFile() != null) { - // Load config file template - switch (configFile.getType()) { - case HADOOP_XML: - // Hadoop_xml_template - resolveHadoopXmlTemplateAndSaveOnHdfs(fs.getFileSystem(), - tokensForSubstitution, configFile, remoteFile, context); - break; - case TEMPLATE: - // plain-template - resolvePlainTemplateAndSaveOnHdfs(fs.getFileSystem(), - tokensForSubstitution, configFile, remoteFile, context); - break; - default: - log.info("Not supporting loading src_file for " + configFile); - break; - } - } else { - // non-template - resolveNonTemplateConfigsAndSaveOnHdfs(fs, tokensForSubstitution, - instance, configFile, fileName, remoteFile); - } - } - - // Add resource for localization - LocalResource configResource = - fs.createAmResource(remoteFile, LocalResourceType.FILE); - File destFile = new File(configFile.getDestFile()); - String symlink = APP_CONF_DIR + "/" + fileName; - if (destFile.isAbsolute()) { - launcher.addLocalResource(symlink, configResource, - configFile.getDestFile()); - log.info("Add config file for localization: " + symlink + " -> " - + configResource.getResource().getFile() + ", dest mount path: " - + configFile.getDestFile()); - } else { - launcher.addLocalResource(symlink, configResource); - log.info("Add config file for localization: " + symlink + " -> " - + configResource.getResource().getFile()); - } - } - } - - private static void resolveNonTemplateConfigsAndSaveOnHdfs(SliderFileSystem fs, - Map tokensForSubstitution, ComponentInstance instance, - ConfigFile configFile, String fileName, Path remoteFile) - throws IOException { - // substitute non-template configs - substituteMapWithTokens(configFile.getProps(), tokensForSubstitution); - - // write configs onto hdfs - PublishedConfiguration publishedConfiguration = - new PublishedConfiguration(fileName, - configFile.getProps().entrySet()); - if 
(!fs.getFileSystem().exists(remoteFile)) { - PublishedConfigurationOutputter configurationOutputter = - PublishedConfigurationOutputter.createOutputter( - ConfigFormat.resolve(configFile.getType().toString()), - publishedConfiguration); - try (FSDataOutputStream os = fs.getFileSystem().create(remoteFile)) { - configurationOutputter.save(os); - os.flush(); - } - } else { - log.info("Component instance = " + instance.getCompInstanceName() - + ", config file already exists: " + remoteFile); - } - } - - // 1. substitute config template - only handle hadoop_xml format - // 2. save on hdfs - @SuppressWarnings("unchecked") - private static void resolveHadoopXmlTemplateAndSaveOnHdfs(FileSystem fs, - Map tokensForSubstitution, ConfigFile configFile, - Path remoteFile, ServiceContext context) throws IOException { - Map conf; - try { - conf = (Map) context.configCache.get(configFile); - } catch (ExecutionException e) { - log.info("Failed to load config file: " + configFile, e); - return; - } - // make a copy for substitution - org.apache.hadoop.conf.Configuration confCopy = - new org.apache.hadoop.conf.Configuration(false); - for (Map.Entry entry : conf.entrySet()) { - confCopy.set(entry.getKey(), entry.getValue()); - } - // substitute properties - for (Map.Entry entry : configFile.getProps().entrySet()) { - confCopy.set(entry.getKey(), entry.getValue()); - } - // substitute env variables - for (Map.Entry entry : confCopy) { - String val = entry.getValue(); - if (val != null) { - for (Map.Entry token : tokensForSubstitution - .entrySet()) { - val = val.replaceAll(Pattern.quote(token.getKey()), token.getValue()); - confCopy.set(entry.getKey(), val); - } - } - } - // save on hdfs - try (OutputStream output = fs.create(remoteFile)) { - confCopy.writeXml(output); - log.info("Reading config from: " + configFile.getSrcFile() - + ", writing to: " + remoteFile); - } - } - - // 1) read the template as a string - // 2) do token substitution - // 3) save on hdfs - private static void resolvePlainTemplateAndSaveOnHdfs(FileSystem fs, - Map tokensForSubstitution, ConfigFile configFile, - Path remoteFile, ServiceContext context) { - String content; - try { - content = (String) context.configCache.get(configFile); - } catch (ExecutionException e) { - log.info("Failed to load config file: " + configFile, e); - return; - } - // substitute tokens - content = substituteStrWithTokens(content, tokensForSubstitution); - - try (OutputStream output = fs.create(remoteFile)) { - org.apache.commons.io.IOUtils.write(content, output); - } catch (IOException e) { - log.info("Failed to create " + remoteFile); - } - } - - /** - * Get initial component token map to be substituted into config values. 
- * @return tokens to replace - */ - public static Map initCompTokensForSubstitute( - ComponentInstance instance) { - Map tokens = new HashMap<>(); - tokens.put(COMPONENT_NAME, instance.getCompSpec().getName()); - tokens - .put(COMPONENT_NAME_LC, instance.getCompSpec().getName().toLowerCase()); - tokens.put(COMPONENT_INSTANCE_NAME, instance.getCompInstanceName()); - tokens.put(CONTAINER_ID, instance.getContainer().getId().toString()); - tokens.put(COMPONENT_ID, - String.valueOf(instance.getCompInstanceId().getId())); - return tokens; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java deleted file mode 100644 index 0920a9c..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
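Note: ProviderUtils above drives all configuration templating with plain string substitution — initCompTokensForSubstitute builds the token map for a component instance, and substituteStrWithTokens replaces each key literally via Pattern.quote. A standalone sketch of that approach follows; the token names are illustrative, while the real constants come from ServiceApiConstants.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

// Illustrative sketch of literal token substitution; not part of the patch.
public class TokenSubstitutionSketch {

  static String substitute(String content, Map<String, String> tokens) {
    for (Map.Entry<String, String> token : tokens.entrySet()) {
      // Pattern.quote makes the key a literal, so tokens such as
      // "${COMPONENT_NAME}" are not interpreted as regular expressions.
      content = content.replaceAll(Pattern.quote(token.getKey()), token.getValue());
    }
    return content;
  }

  public static void main(String[] args) {
    Map<String, String> tokens = new LinkedHashMap<>();
    tokens.put("${COMPONENT_NAME}", "regionserver");
    tokens.put("${COMPONENT_ID}", "0");
    System.out.println(
        substitute("host=${COMPONENT_NAME}-${COMPONENT_ID}.example", tokens));
    // prints: host=regionserver-0.example
  }
}
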
- */ -package org.apache.hadoop.yarn.service.provider.defaultImpl; - -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.api.records.Artifact; -import org.apache.hadoop.yarn.service.api.records.ConfigFile; - -import java.io.IOException; -import java.nio.file.Paths; - -public class DefaultClientProvider extends AbstractClientProvider { - - public DefaultClientProvider() { - } - - @Override - public void validateArtifact(Artifact artifact, FileSystem fileSystem) { - } - - @Override - protected void validateConfigFile(ConfigFile configFile, FileSystem - fileSystem) throws IOException { - // validate dest_file is not absolute - if (Paths.get(configFile.getDestFile()).isAbsolute()) { - throw new IllegalArgumentException( - "Dest_file must not be absolute path: " + configFile.getDestFile()); - } - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java deleted file mode 100644 index 868bba8..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.defaultImpl; - -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.provider.ProviderService; -import org.apache.hadoop.yarn.service.provider.ProviderFactory; - -public final class DefaultProviderFactory extends ProviderFactory { - private static final ProviderFactory FACTORY = new - DefaultProviderFactory(); - - private DefaultProviderFactory() {} - - private static class Client { - static final AbstractClientProvider PROVIDER = new DefaultClientProvider(); - } - - private static class Server { - static final ProviderService PROVIDER = new DefaultProviderService(); - } - - @Override - public AbstractClientProvider createClientProvider() { - return Client.PROVIDER; - } - - @Override - public ProviderService createServerProvider() { - return Server.PROVIDER; - } - - public static ProviderFactory getInstance() { - return FACTORY; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java deleted file mode 100644 index 33f8278..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
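Note: DefaultProviderFactory above keeps its client and server providers in nested static holder classes, so each provider is created lazily and at most once. A generic sketch of that holder idiom is below; the class and method names are illustrative.

// Illustrative sketch of the static-holder singleton idiom; not part of the patch.
public class HolderIdiomSketch {

  static class ExpensiveProvider {
    ExpensiveProvider() {
      System.out.println("provider constructed");
    }
  }

  private static class Holder {
    // initialised by the JVM on first access to Holder, thread-safely
    static final ExpensiveProvider INSTANCE = new ExpensiveProvider();
  }

  public static ExpensiveProvider getInstance() {
    return Holder.INSTANCE;
  }

  public static void main(String[] args) {
    System.out.println("before first use");
    getInstance(); // "provider constructed" is printed only now
    getInstance(); // same instance, no second construction
  }
}
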
- */ -package org.apache.hadoop.yarn.service.provider.defaultImpl; - -import org.apache.hadoop.yarn.service.compinstance.ComponentInstance; -import org.apache.hadoop.yarn.service.provider.AbstractProviderService; -import org.apache.hadoop.yarn.service.api.records.Application; -import org.apache.hadoop.yarn.service.utils.SliderFileSystem; -import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher; - -import java.io.IOException; - -public class DefaultProviderService extends AbstractProviderService { - - @Override - public void processArtifact(AbstractLauncher launcher, - ComponentInstance compInstance, SliderFileSystem fileSystem, - Application application) - throws IOException { - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java deleted file mode 100644 index d4a2254..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.docker; - -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.yarn.service.api.records.Artifact; -import org.apache.hadoop.yarn.service.api.records.ConfigFile; -import org.apache.hadoop.yarn.service.conf.YarnServiceConstants; -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages; - -import java.io.IOException; - -public class DockerClientProvider extends AbstractClientProvider - implements YarnServiceConstants { - - public DockerClientProvider() { - super(); - } - - @Override - public void validateArtifact(Artifact artifact, FileSystem fileSystem) { - if (artifact == null) { - throw new IllegalArgumentException( - RestApiErrorMessages.ERROR_ARTIFACT_INVALID); - } - if (StringUtils.isEmpty(artifact.getId())) { - throw new IllegalArgumentException( - RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID); - } - } - - @Override - protected void validateConfigFile(ConfigFile configFile, FileSystem - fileSystem) throws IOException { - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java deleted file mode 100644 index f30c002..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.docker; - -public interface DockerKeys { - String PROVIDER_DOCKER = "docker"; - String DOCKER_PREFIX = "docker."; - String DOCKER_IMAGE = DOCKER_PREFIX + "image"; - String DOCKER_NETWORK = DOCKER_PREFIX + "network"; - String DOCKER_USE_PRIVILEGED = DOCKER_PREFIX + "usePrivileged"; - String DOCKER_START_COMMAND = DOCKER_PREFIX + "startCommand"; - - String DEFAULT_DOCKER_NETWORK = "bridge"; - Boolean DEFAULT_DOCKER_USE_PRIVILEGED = false; -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java deleted file mode 100644 index 57330ab..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.docker; - -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.provider.ProviderService; -import org.apache.hadoop.yarn.service.provider.ProviderFactory; - -public class DockerProviderFactory extends ProviderFactory { - private static final ProviderFactory FACTORY = new - DockerProviderFactory(); - - private DockerProviderFactory() { - } - - private static class Client { - static final AbstractClientProvider PROVIDER = new DockerClientProvider(); - } - - private static class Server { - static final ProviderService PROVIDER = new DockerProviderService(); - } - - @Override - public AbstractClientProvider createClientProvider() { - return Client.PROVIDER; - } - - @Override - public ProviderService createServerProvider() { - return Server.PROVIDER; - } - - public static ProviderFactory getInstance() { - return FACTORY; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java deleted file mode 100644 index 236ddd9..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.docker; - -import org.apache.hadoop.registry.client.api.RegistryConstants; -import org.apache.hadoop.registry.client.binding.RegistryUtils; -import org.apache.hadoop.yarn.service.compinstance.ComponentInstance; -import org.apache.hadoop.yarn.service.provider.AbstractProviderService; -import org.apache.hadoop.yarn.service.api.records.Application; -import org.apache.hadoop.yarn.service.utils.SliderFileSystem; -import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher; - -import java.io.IOException; -import java.text.MessageFormat; - -public class DockerProviderService extends AbstractProviderService - implements DockerKeys { - - public void processArtifact(AbstractLauncher launcher, - ComponentInstance compInstance, SliderFileSystem fileSystem, - Application application) throws IOException{ - launcher.setYarnDockerMode(true); - launcher.setDockerImage(compInstance.getCompSpec().getArtifact().getId()); - launcher.setDockerNetwork(compInstance.getCompSpec().getConfiguration() - .getProperty(DOCKER_NETWORK, DEFAULT_DOCKER_NETWORK)); - String domain = compInstance.getComponent().getScheduler().getConfig() - .get(RegistryConstants.KEY_DNS_DOMAIN); - String hostname; - if (domain == null || domain.isEmpty()) { - hostname = MessageFormat - .format("{0}.{1}.{2}", compInstance.getCompInstanceName(), - application.getName(), RegistryUtils.currentUser()); - } else { - hostname = MessageFormat - .format("{0}.{1}.{2}.{3}", compInstance.getCompInstanceName(), - application.getName(), RegistryUtils.currentUser(), domain); - } - launcher.setDockerHostname(hostname); - launcher.setRunPrivilegedContainer( - compInstance.getCompSpec().getRunPrivilegedContainer()); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java deleted file mode 100644 index 01f7b20..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
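Note: DockerProviderService above derives the container hostname from the component instance name, the service name, the current user and, when configured, the registry DNS domain. A self-contained sketch of that composition follows; the sample values are illustrative, and the real code obtains the user from RegistryUtils.currentUser() and the domain from the registry configuration.

import java.text.MessageFormat;

// Illustrative sketch of the hostname composition; not part of the patch.
public class DockerHostnameSketch {

  static String hostname(String compInstanceName, String serviceName,
      String user, String domain) {
    if (domain == null || domain.isEmpty()) {
      return MessageFormat.format("{0}.{1}.{2}",
          compInstanceName, serviceName, user);
    }
    return MessageFormat.format("{0}.{1}.{2}.{3}",
        compInstanceName, serviceName, user, domain);
  }

  public static void main(String[] args) {
    System.out.println(hostname("regionserver-0", "hbase", "alice", null));
    // regionserver-0.hbase.alice
    System.out.println(hostname("regionserver-0", "hbase", "alice", "example.com"));
    // regionserver-0.hbase.alice.example.com
  }
}
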
- */ -package org.apache.hadoop.yarn.service.provider.tarball; - -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.yarn.service.api.records.Artifact; -import org.apache.hadoop.yarn.service.api.records.ConfigFile; -import org.apache.hadoop.yarn.service.conf.YarnServiceConstants; -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages; - -import java.io.IOException; -import java.nio.file.Paths; - -public class TarballClientProvider extends AbstractClientProvider - implements YarnServiceConstants { - - public TarballClientProvider() { - } - - @Override - public void validateArtifact(Artifact artifact, FileSystem fs) - throws IOException { - if (artifact == null) { - throw new IllegalArgumentException( - RestApiErrorMessages.ERROR_ARTIFACT_INVALID); - } - if (StringUtils.isEmpty(artifact.getId())) { - throw new IllegalArgumentException( - RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID); - } - Path p = new Path(artifact.getId()); - if (!fs.exists(p)) { - throw new IllegalArgumentException( "Artifact tarball does not exist " - + artifact.getId()); - } - } - - @Override - protected void validateConfigFile(ConfigFile configFile, FileSystem - fileSystem) throws IOException { - // validate dest_file is not absolute - if (Paths.get(configFile.getDestFile()).isAbsolute()) { - throw new IllegalArgumentException( - "Dest_file must not be absolute path: " + configFile.getDestFile()); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java deleted file mode 100644 index 9d81f66..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
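Note: both DefaultClientProvider and TarballClientProvider above reject config files whose dest_file is an absolute path. A standalone sketch of that check is below; the sample paths are illustrative, and the behaviour of Paths.get(...).isAbsolute() assumes a POSIX-style filesystem.

import java.nio.file.Paths;

// Illustrative sketch of the dest_file validation; not part of the patch.
public class DestFileValidationSketch {

  static void validateDestFile(String destFile) {
    if (Paths.get(destFile).isAbsolute()) {
      throw new IllegalArgumentException(
          "Dest_file must not be absolute path: " + destFile);
    }
  }

  public static void main(String[] args) {
    validateDestFile("conf/hbase-site.xml"); // accepted
    try {
      validateDestFile("/etc/hbase/hbase-site.xml");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());    // rejected
    }
  }
}
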
- */ -package org.apache.hadoop.yarn.service.provider.tarball; - -import org.apache.hadoop.yarn.service.provider.AbstractClientProvider; -import org.apache.hadoop.yarn.service.provider.ProviderService; -import org.apache.hadoop.yarn.service.provider.ProviderFactory; - -public class TarballProviderFactory extends ProviderFactory { - private static final ProviderFactory FACTORY = new - TarballProviderFactory(); - - private TarballProviderFactory() { - } - - private static class Client { - static final AbstractClientProvider PROVIDER = new TarballClientProvider(); - } - - private static class Server { - static final ProviderService PROVIDER = new TarballProviderService(); - } - - @Override - public AbstractClientProvider createClientProvider() { - return Client.PROVIDER; - } - - @Override - public ProviderService createServerProvider() { - return Server.PROVIDER; - } - - public static ProviderFactory getInstance() { - return FACTORY; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java deleted file mode 100644 index 2403255..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.service.provider.tarball; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.api.records.LocalResourceType; -import org.apache.hadoop.yarn.service.compinstance.ComponentInstance; -import org.apache.hadoop.yarn.service.provider.AbstractProviderService; -import org.apache.hadoop.yarn.service.api.records.Application; -import org.apache.hadoop.yarn.service.utils.SliderFileSystem; -import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher; - -import java.io.IOException; - -public class TarballProviderService extends AbstractProviderService { - - @Override - public void processArtifact(AbstractLauncher launcher, - ComponentInstance instance, SliderFileSystem fileSystem, - Application application) - throws IOException { - Path artifact = new Path(instance.getCompSpec().getArtifact().getId()); - if (!fileSystem.isFile(artifact)) { - throw new IOException( - "Package doesn't exist as a resource: " + artifact.toString()); - } - log.info("Adding resource {}", artifact.toString()); - LocalResourceType type = LocalResourceType.ARCHIVE; - LocalResource packageResource = fileSystem.createAmResource(artifact, type); - launcher.addLocalResource(APP_LIB_DIR, packageResource); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java deleted file mode 100644 index 56634f6..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.service.registry; - -/** - * These are constants unique to the Slider AM - */ -public class CustomRegistryConstants { - - public static final String MANAGEMENT_REST_API = - "classpath:org.apache.slider.management"; - - public static final String REGISTRY_REST_API = - "classpath:org.apache.slider.registry"; - - public static final String PUBLISHER_REST_API = - "classpath:org.apache.slider.publisher"; - - public static final String PUBLISHER_CONFIGURATIONS_API = - "classpath:org.apache.slider.publisher.configurations"; - - public static final String PUBLISHER_EXPORTS_API = - "classpath:org.apache.slider.publisher.exports"; - - public static final String PUBLISHER_DOCUMENTS_API = - "classpath:org.apache.slider.publisher.documents"; - - public static final String AGENT_SECURE_REST_API = - "classpath:org.apache.slider.agents.secure"; - - public static final String AGENT_ONEWAY_REST_API = - "classpath:org.apache.slider.agents.oneway"; - - public static final String AM_IPC_PROTOCOL = - "classpath:org.apache.slider.appmaster.ipc"; - - public static final String AM_REST_BASE = - "classpath:org.apache.slider.client.rest"; - - public static final String WEB_UI = "http://"; -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java deleted file mode 100644 index ef5ed91..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.service.registry; - -import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.fs.PathNotFoundException; -import org.apache.hadoop.registry.client.api.RegistryConstants; -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.registry.client.api.BindFlags; -import org.apache.hadoop.registry.client.api.RegistryOperations; -import org.apache.hadoop.registry.client.binding.RegistryUtils; -import org.apache.hadoop.registry.client.binding.RegistryPathUtils; - -import org.apache.hadoop.registry.client.types.ServiceRecord; -import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceId; -import org.apache.hadoop.yarn.service.utils.SliderUtils; - -import java.io.IOException; -import java.util.List; - -import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.join; - -/** - * Registry view for providers. This tracks where the service - * is registered, offers access to the record and other things. - */ -public class YarnRegistryViewForProviders { - private static final Log LOG = - LogFactory.getLog(YarnRegistryViewForProviders.class); - - private final RegistryOperations registryOperations; - private final String user; - private final String sliderServiceClass; - private final String instanceName; - /** - * Record used where the service registered itself. - * Null until the service is registered - */ - private ServiceRecord selfRegistration; - - /** - * Path where record was registered - * Null until the service is registered - */ - private String selfRegistrationPath; - - public YarnRegistryViewForProviders(RegistryOperations registryOperations, - String user, - String sliderServiceClass, - String instanceName, - ApplicationAttemptId applicationAttemptId) { - Preconditions.checkArgument(registryOperations != null, - "null registry operations"); - Preconditions.checkArgument(user != null, "null user"); - Preconditions.checkArgument(SliderUtils.isSet(sliderServiceClass), - "unset service class"); - Preconditions.checkArgument(SliderUtils.isSet(instanceName), - "instanceName"); - Preconditions.checkArgument(applicationAttemptId != null, - "null applicationAttemptId"); - this.registryOperations = registryOperations; - this.user = user; - this.sliderServiceClass = sliderServiceClass; - this.instanceName = instanceName; - } - - public String getUser() { - return user; - } - - - private void setSelfRegistration(ServiceRecord selfRegistration) { - this.selfRegistration = selfRegistration; - } - - /** - * Get the path to where the service has registered itself. - * Null until the service is registered - * @return the service registration path. - */ - public String getSelfRegistrationPath() { - return selfRegistrationPath; - } - - /** - * Get the absolute path to where the service has registered itself. - * This includes the base registry path - * Null until the service is registered - * @return the service registration path. 
- */ - public String getAbsoluteSelfRegistrationPath() { - if (selfRegistrationPath == null) { - return null; - } - String root = registryOperations.getConfig().getTrimmed( - RegistryConstants.KEY_REGISTRY_ZK_ROOT, - RegistryConstants.DEFAULT_ZK_REGISTRY_ROOT); - return RegistryPathUtils.join(root, selfRegistrationPath); - } - - /** - * Add a component under the slider name/entry - * @param componentName component name - * @param record record to put - * @throws IOException - */ - public void putComponent(String componentName, - ServiceRecord record) throws - IOException { - putComponent(sliderServiceClass, instanceName, - componentName, - record); - } - - /** - * Add a component - * @param serviceClass service class to use under ~user - * @param componentName component name - * @param record record to put - * @throws IOException - */ - public void putComponent(String serviceClass, - String serviceName, - String componentName, - ServiceRecord record) throws IOException { - String path = RegistryUtils.componentPath( - user, serviceClass, serviceName, componentName); - registryOperations.mknode(RegistryPathUtils.parentOf(path), true); - registryOperations.bind(path, record, BindFlags.OVERWRITE); - } - - /** - * Add a service under a path, optionally purging any history - * @param username user - * @param serviceClass service class to use under ~user - * @param serviceName name of the service - * @param record service record - * @param deleteTreeFirst perform recursive delete of the path first. - * @return the path the service was created at - * @throws IOException - */ - public String putService(String username, - String serviceClass, - String serviceName, - ServiceRecord record, - boolean deleteTreeFirst) throws IOException { - String path = RegistryUtils.servicePath( - username, serviceClass, serviceName); - if (deleteTreeFirst) { - registryOperations.delete(path, true); - } - registryOperations.mknode(RegistryPathUtils.parentOf(path), true); - registryOperations.bind(path, record, BindFlags.OVERWRITE); - return path; - } - - /** - * Add a service under a path for the current user - * @param record service record - * @param deleteTreeFirst perform recursive delete of the path first - * @return the path the service was created at - * @throws IOException - */ - public String registerSelf( - ServiceRecord record, - boolean deleteTreeFirst) throws IOException { - selfRegistrationPath = - putService(user, sliderServiceClass, instanceName, record, deleteTreeFirst); - setSelfRegistration(record); - return selfRegistrationPath; - } - - /** - * Delete a component - * @param containerId component name - * @throws IOException - */ - public void deleteComponent(ComponentInstanceId instanceId, - String containerId) throws IOException { - String path = RegistryUtils.componentPath( - user, sliderServiceClass, instanceName, - containerId); - LOG.info(instanceId + ": Deleting registry path " + path); - registryOperations.delete(path, false); - } - - /** - * Delete the children of a path -but not the path itself. 
- * It is not an error if the path does not exist - * @param path path to delete - * @param recursive flag to request recursive deletes - * @throws IOException IO problems - */ - public void deleteChildren(String path, boolean recursive) throws IOException { - List childNames = null; - try { - childNames = registryOperations.list(path); - } catch (PathNotFoundException e) { - return; - } - for (String childName : childNames) { - String child = join(path, childName); - registryOperations.delete(child, recursive); - } - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java deleted file mode 100644 index 2d01bef..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.rest; - -import com.google.common.base.Preconditions; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientHandlerException; -import com.sun.jersey.api.client.GenericType; -import com.sun.jersey.api.client.UniformInterfaceException; -import com.sun.jersey.api.client.WebResource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.ws.rs.core.MediaType; -import java.io.IOException; -import java.net.URI; - - -/** - * This is a base class for Jersey REST clients in Slider. - * It supports the execution of operations —with - * exceptions uprated to IOExceptions when needed. - *
<p>
- * Subclasses can use these operations to provide an API-like view - * of the REST model - */ -public class BaseRestClient { - private static final Logger log = - LoggerFactory.getLogger(BaseRestClient.class); - private final Client client; - - public BaseRestClient( - Client client) { - Preconditions.checkNotNull(client, "null jersey client"); - this.client = client; - } - - /** - * Get the jersey client - * @return jersey client - */ - public Client getClient() { - return client; - } - - /** - * Execute the operation. Failures are raised as IOException subclasses - * @param method method to execute - * @param resource resource to work against - * @param c class to build - * @param type expected - * @return an instance of the type T - * @throws IOException on any failure - */ - public T exec(HttpVerb method, WebResource resource, Class c) - throws IOException { - try { - Preconditions.checkArgument(c != null); - log.debug("{}} {}", method, resource.getURI()); - return resource.accept(MediaType.APPLICATION_JSON_TYPE) - .method(method.getVerb(), c); - } catch (ClientHandlerException ex) { - throw ExceptionConverter.convertJerseyException(method.getVerb(), - resource.getURI().toString(), - ex); - } catch (UniformInterfaceException ex) { - throw UgiJerseyBinding.uprateFaults(method, - resource.getURI().toString(), - ex); - } - } - - /** - * Execute the operation. Failures are raised as IOException subclasses - * @param method method to execute - * @param resource resource to work against - * @param t type to work with - * @param type expected - * @return an instance of the type T - * @throws IOException on any failure - */ - public T exec(HttpVerb method, WebResource resource, GenericType t) - throws IOException { - try { - Preconditions.checkArgument(t != null); - log.debug("{}} {}", method, resource.getURI()); - resource.accept(MediaType.APPLICATION_JSON_TYPE); - return resource.method(method.getVerb(), t); - } catch (ClientHandlerException ex) { - throw ExceptionConverter.convertJerseyException(method.getVerb(), - resource.getURI().toString(), - ex); - } catch (UniformInterfaceException ex) { - throw UgiJerseyBinding.uprateFaults(method, resource.getURI().toString(), - ex); - } - } - - - /** - * Execute the GET operation. Failures are raised as IOException subclasses - * @param resource resource to work against - * @param c class to build - * @param type expected - * @return an instance of the type T - * @throws IOException on any failure - */ - public T get(WebResource resource, Class c) throws IOException { - return exec(HttpVerb.GET, resource, c); - } - - /** - * Create a Web resource from the client. - * - * @param u the URI of the resource. - * @return the Web resource. - */ - public WebResource resource(URI u) { - return client.resource(u); - } - - /** - * Create a Web resource from the client. - * - * @param url the URI of the resource. - * @return the Web resource. 
- */ - - public WebResource resource(String url) { - return client.resource(url); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java deleted file mode 100644 index 12fdc79..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.rest; - -import com.sun.jersey.api.client.ClientHandlerException; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.UniformInterfaceException; -import org.apache.hadoop.fs.InvalidRequestException; -import org.apache.hadoop.fs.PathAccessDeniedException; -import org.apache.hadoop.fs.PathIOException; -import org.apache.hadoop.yarn.webapp.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.servlet.http.HttpServletResponse; -import java.io.FileNotFoundException; -import java.io.IOException; - -/** - * static methods to convert exceptions into different types, including - * extraction of details and finer-grained conversions. 
- */ -public class ExceptionConverter { - private static final Logger - log = LoggerFactory.getLogger(ExceptionConverter.class); - - /** - * Uprate error codes 400 and up into faults; - * 404 is converted to a {@link FileNotFoundException}, - * 401 to {@link ForbiddenException} - * FileNotFoundException for an unknown resource - * PathAccessDeniedException for access denied - * PathIOException for anything else - * @param verb HTTP Verb used - * @param targetURL URL being targeted - * @param exception original exception - * @return a new exception, the original one nested as a cause - */ - public static IOException convertJerseyException(String verb, - String targetURL, - UniformInterfaceException exception) { - - IOException ioe = null; - ClientResponse response = exception.getResponse(); - if (response != null) { - int status = response.getStatus(); - String body = ""; - try { - if (response.hasEntity()) { - body = response.getEntity(String.class); - log.error("{} {} returned status {} and body\n{}", - verb, targetURL, status, body); - } else { - log.error("{} {} returned status {} and empty body", - verb, targetURL, status); - } - } catch (Exception e) { - log.warn("Failed to extract body from client response", e); - } - - if (status == HttpServletResponse.SC_UNAUTHORIZED - || status == HttpServletResponse.SC_FORBIDDEN) { - ioe = new PathAccessDeniedException(targetURL); - } else if (status == HttpServletResponse.SC_BAD_REQUEST - || status == HttpServletResponse.SC_NOT_ACCEPTABLE - || status == HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE) { - // bad request - ioe = new InvalidRequestException( - String.format("Bad %s request: status code %d against %s", - verb, status, targetURL)); - } else if (status > 400 && status < 500) { - ioe = new FileNotFoundException(targetURL); - } - if (ioe == null) { - ioe = new PathIOException(targetURL, - verb + " " + targetURL - + " failed with status code : " + status - + ":" + exception); - } - } else { - ioe = new PathIOException(targetURL, - verb + " " + targetURL + " failed: " + exception); - } - ioe.initCause(exception); - return ioe; - } - - /** - * Handle a client-side Jersey exception. - *
<p>
- * If there's an inner IOException, return that. - *
<p>
- * Otherwise: create a new wrapper IOE including verb and target details - * @param verb HTTP Verb used - * @param targetURL URL being targeted - * @param exception original exception - * @return an exception to throw - */ - public static IOException convertJerseyException(String verb, - String targetURL, - ClientHandlerException exception) { - if (exception.getCause() instanceof IOException) { - return (IOException)exception.getCause(); - } else { - IOException ioe = new IOException( - verb + " " + targetURL + " failed: " + exception); - ioe.initCause(exception); - return ioe; - } - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java deleted file mode 100644 index 93f9082..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.rest; - -/** - * Http verbs with details on what they support in terms of submit and - * response bodies. - *
<p>
- * Those verbs which do support bodies in the response MAY NOT return it; - * if the response code is 204 then the answer is "no body", but the operation - * is considered a success. - */ -public enum HttpVerb { - GET("GET", false, true), - POST("POST", true, true), - PUT("PUT", true, true), - DELETE("DELETE", false, true), - HEAD("HEAD", false, false); - - private final String verb; - private final boolean hasUploadBody; - private final boolean hasResponseBody; - - HttpVerb(String verb, boolean hasUploadBody, boolean hasResponseBody) { - this.verb = verb; - this.hasUploadBody = hasUploadBody; - this.hasResponseBody = hasResponseBody; - } - - public String getVerb() { - return verb; - } - - public boolean hasUploadBody() { - return hasUploadBody; - } - - public boolean hasResponseBody() { - return hasResponseBody; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae16ae9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java deleted file mode 100644 index fcd7f55..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java +++ /dev/null @@ -1,176 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service.rest; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdfs.web.KerberosUgiAuthenticator; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authentication.client.AuthenticatedURL; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; -import org.apache.hadoop.security.ssl.SSLFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.SSLSocketFactory; -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLConnection; -import java.security.GeneralSecurityException; - -/** - * Factory for URL connections; used behind the scenes in the Jersey integration. 
- * <p>
- * Derived from the WebHDFS implementation. - */ -public class SliderURLConnectionFactory { - private static final Logger log = - LoggerFactory.getLogger(SliderURLConnectionFactory.class); - - /** - * Timeout for socket connects and reads - */ - public final static int DEFAULT_SOCKET_TIMEOUT = 60 * 1000; // 1 minute - private final ConnectionConfigurator connConfigurator; - - private static final ConnectionConfigurator DEFAULT_CONFIGURATOR = new BasicConfigurator(); - - /** - * Construct a new URLConnectionFactory based on the configuration. It will - * try to load SSL certificates when it is specified. - */ - public static SliderURLConnectionFactory newInstance(Configuration conf) { - ConnectionConfigurator conn; - try { - conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf); - } catch (Exception e) { - log.debug("Cannot load customized SSL configuration.", e); - conn = DEFAULT_CONFIGURATOR; - } - return new SliderURLConnectionFactory(conn); - } - - private SliderURLConnectionFactory(ConnectionConfigurator connConfigurator) { - this.connConfigurator = connConfigurator; - } - - /** - * Create a new ConnectionConfigurator for SSL connections - */ - private static ConnectionConfigurator newSslConnConfigurator(final int timeout, - Configuration conf) throws IOException, GeneralSecurityException { - final SSLFactory factory; - final SSLSocketFactory sf; - final HostnameVerifier hv; - - factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf); - factory.init(); - sf = factory.createSSLSocketFactory(); - hv = factory.getHostnameVerifier(); - - return new ConnectionConfigurator() { - @Override - public HttpURLConnection configure(HttpURLConnection conn) - throws IOException { - if (conn instanceof HttpsURLConnection) { - HttpsURLConnection c = (HttpsURLConnection) conn; - c.setSSLSocketFactory(sf); - c.setHostnameVerifier(hv); - } - SliderURLConnectionFactory.setupConnection(conn, timeout); - return conn; - } - }; - } - - /** - * Opens a url with read and connect timeouts - * - * @param url - * to open - * @return URLConnection - * @throws IOException - */ - public URLConnection openConnection(URL url) throws IOException { - try { - return openConnection(url, false); - } catch (AuthenticationException e) { - // Unreachable - return null; - } - } - - /** - * Opens a url with read and connect timeouts - * - * @param url - * URL to open - * @param isSpnego - * whether the url should be authenticated via SPNEGO - * @return URLConnection - * @throws IOException - * @throws AuthenticationException - */ - public URLConnection openConnection(URL url, boolean isSpnego) - throws IOException, AuthenticationException { - if (isSpnego) { - log.debug("open AuthenticatedURL connection {}", url); - UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab(); - final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token(); - return new AuthenticatedURL(new KerberosUgiAuthenticator(), - connConfigurator).openConnection(url, authToken); - } else { - log.debug("open URL connection {}", url); - URLConnection connection = url.openConnection(); - if (connection instanceof HttpURLConnection) { - connConfigurator.configure((HttpURLConnection) connection); - } - return connection; - } - } - - /** - * Sets connection parameters on the given URLConnection - * - * @param connection - * URLConnection to set - * @param socketTimeout - * the connection and read timeout of the connection. 
- */ - private static void setupConnection(URLConnection connection, int socketTimeout) { - connection.setConnectTimeout(socketTimeout); - connection.setReadTimeout(socketTimeout); - connection.setUseCaches(false); - if (connection instanceof HttpURLConnection) { - ((HttpURLConnection) connection).setInstanceFollowRedirects(true); - } - } - - private static class BasicConfigurator implements ConnectionConfigurator { - @Override - public HttpURLConnection configure(HttpURLConnection conn) - throws IOException { - SliderURLConnectionFactory.setupConnection(conn, DEFAULT_SOCKET_TIMEOUT); - return conn; - } - } -}
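For context on the registry layer being moved in this patch: the deleted YarnRegistryViewForProviders builds its ZooKeeper paths with RegistryUtils, so the service's own record and its per-container component records land under the calling user's tree. A minimal sketch follows; the user, service class, instance and container names are made up for illustration and are not taken from this patch.

import org.apache.hadoop.registry.client.binding.RegistryUtils;

public class RegistryPathSketch {
  public static void main(String[] args) {
    // Hypothetical values -- substitute whatever the AM actually registers with.
    String user = "hadoop";
    String serviceClass = "org-apache-slider";
    String instanceName = "sleeper-service";

    // Path that registerSelf()/putService() binds the service's own record under,
    // roughly /users/hadoop/org-apache-slider/sleeper-service.
    String servicePath =
        RegistryUtils.servicePath(user, serviceClass, instanceName);

    // Path that putComponent() binds a per-container record under.
    String componentPath =
        RegistryUtils.componentPath(user, serviceClass, instanceName,
            "container_e01_1234_01_000002");

    System.out.println(servicePath);
    System.out.println(componentPath);
  }
}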
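The REST helpers removed above (BaseRestClient, HttpVerb, ExceptionConverter) were used together: the client executes a verb against a Jersey WebResource and uprates Jersey faults to IOException subclasses, with 404 and most other 4xx statuses becoming FileNotFoundException. A usage sketch of the removed API, assuming a hypothetical AM endpoint:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.yarn.service.rest.BaseRestClient;

import java.io.FileNotFoundException;
import java.io.IOException;

public class RestClientSketch {
  public static void main(String[] args) throws IOException {
    // Plain Jersey 1.x client; a real caller might obtain one via UgiJerseyBinding.
    Client jersey = Client.create();
    BaseRestClient rest = new BaseRestClient(jersey);

    // Hypothetical AM web service address, for illustration only.
    WebResource status =
        rest.resource("http://appmaster.example.com:9999/ws/v1/status");

    try {
      // get() delegates to exec(HttpVerb.GET, ...) and unmarshals the response body.
      String body = rest.get(status, String.class);
      System.out.println(body);
    } catch (FileNotFoundException missing) {
      // ExceptionConverter turns a 404 (and most other 4xx codes) into
      // FileNotFoundException rather than surfacing a raw Jersey exception.
      System.err.println("Resource not found: " + missing.getMessage());
    }
  }
}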
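Similarly, the removed SliderURLConnectionFactory wraps plain URL opens with Hadoop's SSL and SPNEGO machinery. A sketch of how it was driven, again with a placeholder endpoint:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.service.rest.SliderURLConnectionFactory;

import java.net.HttpURLConnection;
import java.net.URL;

public class ConnectionFactorySketch {
  public static void main(String[] args) throws Exception {
    // newInstance() tries to load the client-side SSL configuration; if that
    // fails it falls back to the basic configurator, which only applies the
    // default 60-second connect/read timeouts.
    SliderURLConnectionFactory factory =
        SliderURLConnectionFactory.newInstance(new Configuration());

    // Placeholder endpoint; pass true as the second argument to openConnection()
    // to authenticate the request via SPNEGO instead.
    URL url = new URL("https://appmaster.example.com:9999/ws/v1/status");
    HttpURLConnection conn =
        (HttpURLConnection) factory.openConnection(url, false);

    System.out.println("HTTP " + conn.getResponseCode());
  }
}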