From: jianhe@apache.org
To: common-commits@hadoop.apache.org
Date: Wed, 30 Aug 2017 03:55:53 -0000
Subject: [60/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He
In-Reply-To: <3a98c56b99fd4fe98523bc4190ff3263@git.apache.org>
References: <3a98c56b99fd4fe98523bc4190ff3263@git.apache.org>
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
X-Mailer: ASF-Git Admin Mailer
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
archived-at: Wed, 30 Aug 2017 03:55:25 -0000

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
new file mode 100644
index 0000000..b742553
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
@@ -0,0 +1,293 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.timelineservice;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier;
+import org.apache.hadoop.yarn.client.api.TimelineV2Client;
+import org.apache.hadoop.yarn.client.api.impl.TimelineV2ClientImpl;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.api.records.ServiceState;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Container;
+import org.apache.hadoop.yarn.service.api.records.ContainerState;
+import org.apache.hadoop.yarn.service.api.records.PlacementPolicy;
+import org.apache.hadoop.yarn.service.api.records.Resource;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Test class for ServiceTimelinePublisher.
+ */
+public class TestServiceTimelinePublisher {
+  private TimelineV2Client timelineClient;
+  private Configuration config;
+  private ServiceTimelinePublisher serviceTimelinePublisher;
+  private static String SERVICE_NAME = "HBASE";
+  private static String SERVICEID = "application_1490093646524_0005";
+  private static String ARTIFACTID = "ARTIFACTID";
+  private static String COMPONENT_NAME = "DEFAULT";
+  private static String CONTAINER_ID =
+      "container_e02_1490093646524_0005_01_000001";
+  private static String CONTAINER_IP =
+      "localhost";
+  private static String CONTAINER_HOSTNAME =
+      "cnl124-localhost.site";
+  private static String CONTAINER_BAREHOST =
+      "localhost.com";
+
+  @Before
+  public void setUp() throws Exception {
+    config = new Configuration();
+    config.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
+    config.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
+    timelineClient =
+        new DummyTimelineClient(ApplicationId.fromString(SERVICEID));
+    serviceTimelinePublisher = new ServiceTimelinePublisher(timelineClient);
+    timelineClient.init(config);
+    serviceTimelinePublisher.init(config);
+    timelineClient.start();
+    serviceTimelinePublisher.start();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (serviceTimelinePublisher != null) {
+      serviceTimelinePublisher.stop();
+    }
+    if (timelineClient != null) {
+      timelineClient.stop();
+    }
+  }
+
+  @Test
+  public void testServiceAttemptEntity() {
+    Service service = createMockApplication();
+    serviceTimelinePublisher
+        .serviceAttemptRegistered(service, new YarnConfiguration());
+
+    Collection<TimelineEntity> lastPublishedEntities =
+        ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
+    // 2 entities because during registration component also registered.
+    assertEquals(2, lastPublishedEntities.size());
+    for (TimelineEntity timelineEntity : lastPublishedEntities) {
+      if (timelineEntity.getType() == ServiceTimelineEntityType.COMPONENT
+          .toString()) {
+        verifyComponentTimelineEntity(timelineEntity);
+      } else {
+        verifyServiceAttemptTimelineEntity(timelineEntity, null, true);
+      }
+    }
+
+    ServiceContext context = new ServiceContext();
+    context.attemptId = ApplicationAttemptId
+        .newInstance(ApplicationId.fromString(service.getId()), 1);
+    String exitDiags = "service killed";
+    serviceTimelinePublisher.serviceAttemptUnregistered(context, exitDiags);
+    lastPublishedEntities =
+        ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
+    for (TimelineEntity timelineEntity : lastPublishedEntities) {
+      if (timelineEntity.getType() == ServiceTimelineEntityType.SERVICE_ATTEMPT
+          .toString()) {
+        verifyServiceAttemptTimelineEntity(timelineEntity, exitDiags,
+            false);
+      }
+    }
+  }
+
+  @Test
+  public void testComponentInstanceEntity() {
+    Container container = new Container();
+    container.id(CONTAINER_ID).ip(CONTAINER_IP).bareHost(CONTAINER_BAREHOST)
+        .hostname(CONTAINER_HOSTNAME).state(ContainerState.RUNNING_BUT_UNREADY)
+        .launchTime(new Date());
+    ComponentInstanceId id = new ComponentInstanceId(0, COMPONENT_NAME);
+    ComponentInstance instance = mock(ComponentInstance.class);
+    when(instance.getCompName()).thenReturn(COMPONENT_NAME);
+    when(instance.getCompInstanceName()).thenReturn("comp_instance_name");
+    serviceTimelinePublisher.componentInstanceStarted(container,
+        instance);
+
+    Collection<TimelineEntity> lastPublishedEntities =
+        ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
+    assertEquals(1, lastPublishedEntities.size());
+    TimelineEntity entity = lastPublishedEntities.iterator().next();
+
+    assertEquals(1, entity.getEvents().size());
+    assertEquals(CONTAINER_ID, entity.getId());
+    assertEquals(CONTAINER_BAREHOST,
+        entity.getInfo().get(ServiceTimelineMetricsConstants.BARE_HOST));
+    assertEquals(COMPONENT_NAME,
+        entity.getInfo().get(ServiceTimelineMetricsConstants.COMPONENT_NAME));
+    assertEquals(ContainerState.RUNNING_BUT_UNREADY.toString(),
+        entity.getInfo().get(ServiceTimelineMetricsConstants.STATE));
+
+    // updated container state
+    container.setState(ContainerState.READY);
+    serviceTimelinePublisher.componentInstanceUpdated(container);
+    lastPublishedEntities =
+        ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
+    assertEquals(1, lastPublishedEntities.size());
+    entity = lastPublishedEntities.iterator().next();
+    assertEquals(2, entity.getEvents().size());
+    assertEquals(ContainerState.READY.toString(),
+        entity.getInfo().get(ServiceTimelineMetricsConstants.STATE));
+
+  }
+
+  private void verifyServiceAttemptTimelineEntity(TimelineEntity timelineEntity,
+      String message, boolean isRegistedEntity) {
+    assertEquals(SERVICEID, timelineEntity.getId());
+    assertEquals(SERVICE_NAME,
+        timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.NAME));
+    if (isRegistedEntity) {
+      assertEquals(ServiceState.STARTED.toString(),
+          timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.STATE));
+      assertEquals(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString(),
+          timelineEntity.getEvents().iterator().next().getId());
+    } else {
+      assertEquals("ENDED",
+          timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.STATE).toString());
+      assertEquals(message, timelineEntity.getInfo()
+          .get(ServiceTimelineMetricsConstants.DIAGNOSTICS_INFO));
+      assertEquals(2, timelineEntity.getEvents().size());
+      assertEquals(ServiceTimelineEvent.SERVICE_ATTEMPT_UNREGISTERED.toString(),
+          timelineEntity.getEvents().iterator().next().getId());
+    }
+  }
+
+  private void verifyComponentTimelineEntity(TimelineEntity entity) {
+    Map<String, Object> info = entity.getInfo();
+    assertEquals("DEFAULT", entity.getId());
+    assertEquals(ARTIFACTID,
+        info.get(ServiceTimelineMetricsConstants.ARTIFACT_ID));
+    assertEquals("DOCKER",
+        info.get(ServiceTimelineMetricsConstants.ARTIFACT_TYPE));
+    assertEquals("medium",
+        info.get(ServiceTimelineMetricsConstants.RESOURCE_PROFILE));
+    assertEquals(1, info.get(ServiceTimelineMetricsConstants.RESOURCE_CPU));
+    assertEquals("1024",
+        info.get(ServiceTimelineMetricsConstants.RESOURCE_MEMORY));
+    assertEquals("sleep 1",
+        info.get(ServiceTimelineMetricsConstants.LAUNCH_COMMAND));
+    assertEquals("false",
+        info.get(ServiceTimelineMetricsConstants.RUN_PRIVILEGED_CONTAINER));
+    assertEquals("label",
+        info.get(ServiceTimelineMetricsConstants.PLACEMENT_POLICY));
+  }
+
+  private static Service createMockApplication() {
+    Service service = mock(Service.class);
+
+    when(service.getId()).thenReturn(SERVICEID);
+    when(service.getLaunchTime()).thenReturn(new Date());
+    when(service.getState()).thenReturn(ServiceState.STARTED);
+    when(service.getName()).thenReturn(SERVICE_NAME);
+    when(service.getConfiguration()).thenReturn(
+        new org.apache.hadoop.yarn.service.api.records.Configuration());
+
+    Component component = mock(Component.class);
+    Artifact artifact = new Artifact();
+    artifact.setId(ARTIFACTID);
+    Resource resource = new Resource();
+    resource.setCpus(1);
+    resource.setMemory(1024 + "");
+    resource.setProfile("medium");
+    when(component.getArtifact()).thenReturn(artifact);
+    when(component.getName()).thenReturn(COMPONENT_NAME);
+    when(component.getResource()).thenReturn(resource);
+    when(component.getLaunchCommand()).thenReturn("sleep 1");
+    PlacementPolicy placementPolicy = new PlacementPolicy();
+    placementPolicy.setLabel("label");
+    when(component.getPlacementPolicy()).thenReturn(placementPolicy);
+    when(component.getConfiguration()).thenReturn(
+        new org.apache.hadoop.yarn.service.api.records.Configuration());
+    List<Component> components = new ArrayList<>();
+    components.add(component);
+
+    when(service.getComponents()).thenReturn(components);
+    return service;
+  }
+
+  protected static class DummyTimelineClient extends TimelineV2ClientImpl {
+    private Map<Identifier, TimelineEntity> lastPublishedEntities =
+        new HashMap<>();
+
+    public DummyTimelineClient(ApplicationId appId) {
+      super(appId);
+    }
+
+    @Override
+    public void putEntitiesAsync(TimelineEntity... entities)
+        throws IOException, YarnException {
+      putEntities(entities);
+    }
+
+    @Override
+    public void putEntities(TimelineEntity... entities)
+        throws IOException, YarnException {
+      for (TimelineEntity timelineEntity : entities) {
+        TimelineEntity entity =
+            lastPublishedEntities.get(timelineEntity.getIdentifier());
+        if (entity == null) {
+          lastPublishedEntities.put(timelineEntity.getIdentifier(),
+              timelineEntity);
+        } else {
+          entity.addMetrics(timelineEntity.getMetrics());
+          entity.addEvents(timelineEntity.getEvents());
+          entity.addInfo(timelineEntity.getInfo());
+          entity.addConfigs(timelineEntity.getConfigs());
+          entity.addRelatesToEntities(timelineEntity.getRelatesToEntities());
+          entity
+              .addIsRelatedToEntities(timelineEntity.getIsRelatedToEntities());
+        }
+      }
+    }
+
+    public Collection<TimelineEntity> getLastPublishedEntities() {
+      return lastPublishedEntities.values();
+    }
+
+    public void reset() {
+      lastPublishedEntities = null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/example-app.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/example-app.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/example-app.json
new file mode 100644
index 0000000..5dfbd64
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/example-app.json
@@ -0,0 +1,15 @@
+{
+  "name": "example-app",
+  "components" :
+    [
+      {
+        "name": "simple",
+        "number_of_containers": 1,
+        "launch_command": "sleep 2",
+        "resource": {
+          "cpus": 1,
+          "memory": "128"
+        }
+      }
+    ]
+}
\ No newline at end of file
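As a reading aid (not part of the patch itself): the JSON spec above corresponds to the org.apache.hadoop.yarn.service.api.records model exercised by TestServiceTimelinePublisher. A minimal sketch in the same Mockito style as the test, using only accessors that appear elsewhere in this patch; the helper name createExampleAppSpec is hypothetical.

  // Hedged sketch: a mock Service mirroring example-app.json, built only from
  // calls already used in TestServiceTimelinePublisher above.
  private static Service createExampleAppSpec() {
    Service service = mock(Service.class);
    when(service.getName()).thenReturn("example-app");

    Resource resource = new Resource();
    resource.setCpus(1);        // "cpus": 1
    resource.setMemory("128");  // "memory": "128"

    Component component = mock(Component.class);
    when(component.getName()).thenReturn("simple");
    when(component.getLaunchCommand()).thenReturn("sleep 2");
    when(component.getResource()).thenReturn(resource);
    // "number_of_containers": 1 has no accessor exercised in this patch,
    // so it is deliberately left out of the sketch.

    List<Component> components = new ArrayList<>();
    components.add(component);
    when(service.getComponents()).thenReturn(components);
    return service;
  }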
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app-override.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app-override.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app-override.json
new file mode 100644
index 0000000..d7e2fd0
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app-override.json
@@ -0,0 +1,72 @@
+{
+  "name": "app-1",
+  "lifetime": "3600",
+  "launch_command": "sleep 3600",
+  "configuration": {
+    "properties": {
+      "g1": "a",
+      "g2": "b"
+    },
+    "files": [
+      {
+        "type": "PROPERTIES",
+        "dest_file": "file1",
+        "props": {
+          "k1": "v1",
+          "k2": "v2"
+        }
+      },
+      {
+        "type": "XML",
+        "dest_file": "file2",
+        "props": {
+          "k3": "v3"
+        }
+      }
+    ]
+  },
+  "resource": {
+    "cpus": 1,
+    "memory": "512"
+  },
+  "number_of_containers": 2,
+  "components": [
+    {
+      "name": "simple",
+      "configuration": {
+        "files": [
+          {
+            "type": "PROPERTIES",
+            "dest_file": "file1",
+            "props": {
+              "k1": "overridden"
+            }
+          }
+        ]
+      }
+    },
+    {
+      "name": "master",
+      "configuration": {
+        "properties": {
+          "name": "m",
+          "g1": "overridden"
+        }
+      }
+    },
+    {
+      "name": "worker",
+      "resource": {
+        "cpus": 1,
+        "memory": "1024"
+      },
+      "configuration": {
+        "properties": {
+          "name": "worker",
+          "g1": "overridden-by-worker",
+          "timeout": "1000"
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app.json
new file mode 100644
index 0000000..12b51e4
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/app.json
@@ -0,0 +1,48 @@
+{
+  "name": "service-1",
+  "id" : "application_1503358878042_0011",
+  "lifetime": "3600",
+  "launch_command": "sleep 3600",
+  "configuration": {
+    "properties": {
+      "g1": "a",
+      "g2": "b",
+      "yarn.service.failure-count-reset.window": "60"
+    }
+  },
+  "resource": {
+    "cpus": 1,
+    "memory": "512"
+  },
+  "number_of_containers": 2,
+  "components": [
+    {
+      "name": "simple"
+    },
+    {
+      "name": "master",
+      "number_of_containers": 1,
+      "configuration": {
+        "properties": {
+          "g1": "overridden",
+          "g3": "will-be-overridden",
+          "jvm.heapsize": "512M"
+        }
+      }
+    },
+    {
+      "name": "worker",
+      "number_of_containers": 5,
+      "resource": {
+        "cpus": 1,
+        "memory": "1024"
+      },
+      "configuration": {
+        "properties": {
+          "g1": "overridden-by-worker",
+          "jvm.heapsize": "512M"
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/default.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/default.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/default.json
new file mode 100644
index 0000000..73d4e7b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/default.json
@@ -0,0 +1,16 @@
+{
+  "name": "default-app-1",
+  "lifetime": "3600",
+  "components" :
+    [
+      {
+        "name": "sleep",
+        "number_of_containers": 1,
+        "launch_command": "sleep 3600",
+        "resource": {
+          "cpus": 2,
+          "memory": "256"
+        }
+      }
+    ]
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external0.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external0.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external0.json
new file mode 100644
index 0000000..0857f62
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external0.json
@@ -0,0 +1,8 @@
+{
+  "name": "external-0",
+  "lifetime": "3600",
+  "artifact": {
+    "type": "SERVICE",
+    "id": "app-1"
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external1.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external1.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external1.json
new file mode 100644
index 0000000..4afdb8b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external1.json
@@ -0,0 +1,30 @@
+{
+  "name": "external-1",
+  "lifetime": "3600",
+  "components": [
+    {
+      "name": "simple",
+      "artifact": {
+        "type": "SERVICE",
+        "id": "app-1"
+      }
+    },
+    {
+      "name": "master",
+      "configuration": {
+        "properties": {
+          "g3": "is-overridden"
+        }
+      }
+    },
+    {
+      "name": "other",
+      "launch_command": "sleep 3600",
+      "number_of_containers": 2,
+      "resource": {
+        "cpus": 1,
+        "memory": "512"
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external2.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external2.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external2.json
new file mode 100644
index 0000000..0df8e0a
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/org/apache/hadoop/yarn/service/conf/examples/external2.json
@@ -0,0 +1,22 @@
+{
+  "name": "external-2",
+  "lifetime": "3600",
+  "components": [
+    {
+      "name": "ext",
+      "artifact": {
+        "type": "SERVICE",
+        "id": "external-1"
+      }
+    },
+    {
+      "name": "another",
+      "launch_command": "sleep 3600",
+      "number_of_containers": 1,
+      "resource": {
+        "cpus": 1,
+        "memory": "512"
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/yarn-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/yarn-site.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/yarn-site.xml
new file mode 100644
index 0000000..266caa9
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/resources/yarn-site.xml
@@ -0,0 +1,19 @@
[The 19 added lines of this yarn-site.xml were stripped of their XML markup when the message was archived; only the blank added lines survive, so the body is not reproduced here.]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/pom.xml
new file mode 100644
index 0000000..1233804
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/pom.xml
@@ -0,0 +1,38 @@
[The 38 added lines of this pom.xml lost their XML markup in archiving. The surviving text indicates: parent org.apache.hadoop:hadoop-yarn-applications, version 3.0.0-beta1-SNAPSHOT; modelVersion 4.0.0; artifactId hadoop-yarn-services; name "Apache Hadoop YARN Services"; packaging pom; a property pointing at ${basedir}/../../../../hadoop-common-project/hadoop-common/target; and a single module, hadoop-yarn-services-core.]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/dev-support/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/dev-support/findbugs-exclude.xml
deleted file mode 100644
index 2814cca..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/dev-support/findbugs-exclude.xml
+++ /dev/null
@@ -1,48 +0,0 @@
[The 48 removed lines of this findbugs-exclude.xml were stripped of their XML markup in archiving; no element content survives, so the body is not reproduced here.]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
deleted file mode 100644
index 58c8e27..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
+++ /dev/null
@@ -1,62 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is the log4j configuration for Slider Application Master
-
-# Log rotation based on size (256MB) with a max of 20 backup files
-log4j.rootLogger=INFO, amlog
-log4j.threshhold=ALL
-log4j.appender.amlog=org.apache.log4j.RollingFileAppender
-log4j.appender.amlog.layout=org.apache.log4j.PatternLayout
-log4j.appender.amlog.File=${LOG_DIR}/serviceam.log
-log4j.appender.amlog.MaxFileSize=256MB
-log4j.appender.amlog.MaxBackupIndex=20
-
-# log layout skips stack-trace creation operations by avoiding line numbers and method
-log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - %m%n
-
-# debug edition is much more expensive
-#log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) - %m%n
-
-# configure stderr
-# set the conversion pattern of stderr
-# Print the date in ISO 8601 format
-log4j.appender.stderr=org.apache.log4j.ConsoleAppender
-log4j.appender.stderr.Target=System.err
-log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - %m%n
-
-log4j.appender.subprocess=org.apache.log4j.ConsoleAppender
-log4j.appender.subprocess.layout=org.apache.log4j.PatternLayout
-log4j.appender.subprocess.layout.ConversionPattern=[%c{1}]: %m%n
-
-# for debugging yarn-service framework
-#log4j.logger.org.apache.hadoop.yarn.service=DEBUG
-
-# uncomment for YARN operations
-#log4j.logger.org.apache.hadoop.yarn.client=DEBUG
-
-# uncomment this to debug security problems
-#log4j.logger.org.apache.hadoop.security=DEBUG
-
-#crank back on some noise
-log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
-log4j.logger.org.apache.hadoop.hdfs=WARN
-
-log4j.logger.org.apache.zookeeper=WARN
-log4j.logger.org.apache.curator.framework.state=ERROR
-log4j.logger.org.apache.curator.framework.imps=WARN

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/pom.xml
deleted file mode 100644
index c8de037..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/pom.xml
+++ /dev/null
@@ -1,409 +0,0 @@
[The 409 removed lines of this pom.xml lost their XML markup in archiving, so the body is not reproduced here. The surviving text indicates: module org.apache.hadoop:hadoop-yarn-slider-core (parent hadoop-yarn-slider, 3.0.0-beta1-SNAPSHOT), packaging jar, name "Apache Hadoop YARN Slider Core"; build plugins including hadoop-maven-plugins (protoc compilation of ClientAMProtocol.proto), maven-jar-plugin (test-jar), maven-surefire-plugin, and apache-rat-plugin excluding **/*.json; dependencies including jcommander 1.30, slf4j-api, log4j, guava, the jackson-core-asl/jaxrs/mapper-asl/xc artifacts, hadoop-common (test-jar), hadoop-hdfs, hadoop-hdfs-client, hadoop-yarn-client, hadoop-yarn-server-web-proxy, hadoop-yarn-registry, junit, protobuf-java, commons-compress, commons-digester 1.8, commons-io, commons-lang, commons-logging, metrics-core, metrics-servlets 3.0.1, zookeeper, javax.servlet-api, jaxb-api, jersey-client/json/server, guice, gson, guice-servlet, jersey-guice, mockito-all, easymock 3.1, powermock-api-easymock 1.6.5, powermock-module-junit4 1.6.5, jsp-api, jettison, snakeyaml 1.16, swagger-annotations 1.5.4, and hadoop-minicluster; plus a "dist" profile driving maven-assembly-plugin with the hadoop-yarn-services-dist descriptor.]
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/assembly/executable-jar.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/assembly/executable-jar.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/assembly/executable-jar.xml
deleted file mode 100644
index 23383c8..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/assembly/executable-jar.xml
+++ /dev/null
@@ -1,47 +0,0 @@
[The 47 removed lines of this assembly descriptor lost their XML markup in archiving, so the body is not reproduced here. The surviving text indicates a "distribution" assembly producing a zip that places the project jar from ${project.build.directory} (*.jar) at the archive root and the runtime dependencies under /lib; the file ended without a trailing newline.]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMProtocol.java
deleted file mode 100644
index 516d23d..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMProtocol.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service;
-
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto;
-
-import java.io.IOException;
-
-public interface ClientAMProtocol {
-  FlexComponentsResponseProto flexComponents(FlexComponentsRequestProto request)
-      throws IOException, YarnException;
-
-  GetStatusResponseProto getStatus(GetStatusRequestProto requestProto)
-      throws IOException, YarnException;
-
-  StopResponseProto stop(StopRequestProto requestProto)
-      throws IOException, YarnException;
-}
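For context on how this interface is consumed, a hedged sketch of a client-side caller follows. It mirrors the server-side rpc.getServer(...) registration in ClientAMService further down in this message, but the helper name fetchStatus and the amHost/amPort parameters are illustrative placeholders, not code from this patch.

  // Hedged sketch: obtain a proxy for ClientAMProtocol and fetch the service
  // status JSON. The AM host/port would normally come from the YARN
  // application report.
  static String fetchStatus(String amHost, int amPort)
      throws IOException, YarnException {
    YarnConfiguration conf = new YarnConfiguration();
    YarnRPC rpc = YarnRPC.create(conf);
    InetSocketAddress address = NetUtils.createSocketAddrForHost(amHost, amPort);
    ClientAMProtocol proxy =
        (ClientAMProtocol) rpc.getProxy(ClientAMProtocol.class, address, conf);
    // getStatus returns the application spec serialized as JSON (see
    // ClientAMService#getStatus below).
    return proxy.getStatus(GetStatusRequestProto.newBuilder().build())
        .getStatus();
  }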
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMService.java
deleted file mode 100644
index 6884757..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ClientAMService.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.ComponentCountProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopResponseProto;
-import org.apache.hadoop.yarn.service.component.ComponentEvent;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-
-import static org.apache.hadoop.yarn.service.component.ComponentEventType.FLEX;
-
-public class ClientAMService extends AbstractService
-    implements ClientAMProtocol {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ClientAMService.class);
-
-  private ServiceContext context;
-  private Server server;
-
-  private InetSocketAddress bindAddress;
-
-  public ClientAMService(ServiceContext context) {
-    super("Client AM Service");
-    this.context = context;
-  }
-
-  @Override protected void serviceStart() throws Exception {
-    Configuration conf = getConfig();
-    YarnRPC rpc = YarnRPC.create(conf);
-    InetSocketAddress address = new InetSocketAddress(0);
-    server = rpc.getServer(ClientAMProtocol.class, this, address, conf,
-        context.secretManager, 1);
-    server.start();
-
-    String nodeHostString =
-        System.getenv(ApplicationConstants.Environment.NM_HOST.name());
-
-    bindAddress = NetUtils.createSocketAddrForHost(nodeHostString,
-        server.getListenerAddress().getPort());
-
-    LOG.info("Instantiated ClientAMService at " + bindAddress);
-    super.serviceStart();
-  }
-
-  @Override protected void serviceStop() throws Exception {
-    if (server != null) {
-      server.stop();
-    }
-    super.serviceStop();
-  }
-
-  @Override public FlexComponentsResponseProto flexComponents(
-      FlexComponentsRequestProto request) throws IOException {
-    if (!request.getComponentsList().isEmpty()) {
-      for (ComponentCountProto component : request.getComponentsList()) {
-        ComponentEvent event = new ComponentEvent(component.getName(), FLEX)
-            .setDesired(component.getNumberOfContainers());
-        context.scheduler.getDispatcher().getEventHandler().handle(event);
-        LOG.info("Flexing component {} to {}", component.getName(),
-            component.getNumberOfContainers());
-      }
-    }
-    return FlexComponentsResponseProto.newBuilder().build();
-  }
-
-  @Override
-  public GetStatusResponseProto getStatus(GetStatusRequestProto request)
-      throws IOException, YarnException {
-    String stat = ServiceApiUtil.jsonSerDeser.toJson(context.application);
-    return GetStatusResponseProto.newBuilder().setStatus(stat).build();
-  }
-
-  @Override
-  public StopResponseProto stop(StopRequestProto requestProto)
-      throws IOException, YarnException {
-    LOG.info("Stop the service.");
-    // Stop the service in 2 seconds delay to make sure this rpc call is completed.
- // shutdown hook will be executed which will stop AM gracefully. - Thread thread = new Thread() { - @Override - public void run() { - try { - Thread.sleep(2000); - ExitUtil.terminate(0); - } catch (InterruptedException e) { - LOG.error("Interrupted while stopping", e); - } - } - }; - thread.start(); - return StopResponseProto.newBuilder().build(); - } - - public InetSocketAddress getBindAddress() { - return bindAddress; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ContainerFailureTracker.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ContainerFailureTracker.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ContainerFailureTracker.java deleted file mode 100644 index 4743f28..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ContainerFailureTracker.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.service; - -import org.apache.hadoop.yarn.service.component.Component; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.NODE_BLACKLIST_THRESHOLD; - -/** - * This tracks the container failures per node. If the failure counter exceeds - * the maxFailurePerNode limit, it'll blacklist that node. 
- *
- */
-public class ContainerFailureTracker {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ContainerFailureTracker.class);
-
-  // Host -> num container failures
-  private Map<String, Integer> failureCountPerNode = new HashMap<>();
-  private Set<String> blackListedNodes = new HashSet<>();
-  private ServiceContext context;
-  private int maxFailurePerNode;
-  private Component component;
-
-  public ContainerFailureTracker(ServiceContext context, Component component) {
-    this.context = context;
-    this.component = component;
-    maxFailurePerNode = component.getComponentSpec().getConfiguration()
-        .getPropertyInt(NODE_BLACKLIST_THRESHOLD, 3);
-  }
-
-
-  public synchronized void incNodeFailure(String host) {
-    int num = 0;
-    if (failureCountPerNode.containsKey(host)) {
-      num = failureCountPerNode.get(host);
-    }
-    num++;
-    failureCountPerNode.put(host, num);
-
-    // black list the node if exceed max failure
-    if (num > maxFailurePerNode && !blackListedNodes.contains(host)) {
-      List<String> blacklists = new ArrayList<>();
-      blacklists.add(host);
-      blackListedNodes.add(host);
-      context.scheduler.getAmRMClient().updateBlacklist(blacklists, null);
-      LOG.info("[COMPONENT {}]: Failed {} times on this host, blacklisted {}."
-              + " Current list of blacklisted nodes: {}",
-          component.getName(), num, host, blackListedNodes);
-    }
-  }
-
-  public synchronized void resetContainerFailures() {
-    // reset container failure counter per node
-    failureCountPerNode.clear();
-    context.scheduler.getAmRMClient()
-        .updateBlacklist(null, new ArrayList<>(blackListedNodes));
-    LOG.info("[COMPONENT {}]: Clearing blacklisted nodes {} ",
-        component.getName(), blackListedNodes);
-    blackListedNodes.clear();
-  }
-
-}
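As a reading aid, a hedged sketch of how the tracker is meant to be driven; the call sites shown are assumptions, since this patch only contains the tracker itself and not its callers.

  // Hedged usage sketch, assuming a ServiceContext and Component are in scope.
  ContainerFailureTracker tracker =
      new ContainerFailureTracker(context, component);
  tracker.incNodeFailure("host-1.example.com"); // on each container failure on that host
  tracker.resetContainerFailures();             // when the failure-count window resets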
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceContext.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceContext.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceContext.java
deleted file mode 100644
index c7616af..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceContext.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service;
-
-import com.google.common.cache.LoadingCache;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.security.client.ClientToAMTokenSecretManager;
-import org.apache.hadoop.yarn.service.api.records.Application;
-import org.apache.hadoop.yarn.service.api.records.ConfigFile;
-import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-
-public class ServiceContext {
-  public Application application = null;
-  public SliderFileSystem fs;
-  public String serviceHdfsDir = "";
-  public ApplicationAttemptId attemptId;
-  public LoadingCache<ConfigFile, Object> configCache;
-  public ServiceScheduler scheduler;
-  public ClientToAMTokenSecretManager secretManager;
-  public ClientAMService clientAMService;
-
-  public ServiceContext() {
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceMaster.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceMaster.java
deleted file mode 100644
index d099f8c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceMaster.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.service.CompositeService;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.hadoop.util.ShutdownHookManager;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.security.client.ClientToAMTokenSecretManager;
-import org.apache.hadoop.yarn.service.client.params.SliderAMArgs;
-import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
-import org.apache.hadoop.yarn.service.servicemonitor.ServiceMonitor;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
-import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class ServiceMaster extends CompositeService {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ServiceMaster.class);
-
-  private static SliderAMArgs amArgs;
-  protected ServiceContext context;
-
-  public ServiceMaster(String name) {
-    super(name);
-  }
-
-  @Override
-  protected void serviceInit(Configuration conf) throws Exception {
-    //TODO Deprecate slider conf, make sure works with yarn conf
-    printSystemEnv();
-    if (UserGroupInformation.isSecurityEnabled()) {
-      UserGroupInformation.setConfiguration(conf);
-    }
-    LOG.info("Login user is {}", UserGroupInformation.getLoginUser());
-
-    context = new ServiceContext();
-    Path appDir = getAppDir();
-    context.serviceHdfsDir = appDir.toString();
-    SliderFileSystem fs = new SliderFileSystem(conf);
-    context.fs = fs;
-    fs.setAppDir(appDir);
-    loadApplicationJson(context, fs);
-
-    ContainerId amContainerId = getAMContainerId();
-
-    ApplicationAttemptId attemptId = amContainerId.getApplicationAttemptId();
-    LOG.info("Application attemptId: " + attemptId);
-    context.attemptId = attemptId;
-
-    // configure AM to wait forever for RM
-    conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS, -1);
-    conf.unset(YarnConfiguration.CLIENT_FAILOVER_MAX_ATTEMPTS);
-
-    DefaultMetricsSystem.initialize("ServiceAppMaster");
-
-    context.secretManager = new ClientToAMTokenSecretManager(attemptId, null);
-    ClientAMService clientAMService = new ClientAMService(context);
-    context.clientAMService = clientAMService;
-    addService(clientAMService);
-
-    ServiceScheduler scheduler = createServiceScheduler(context);
-    addService(scheduler);
-    context.scheduler = scheduler;
-
-    ServiceMonitor monitor = new ServiceMonitor("Service Monitor", context);
-    addService(monitor);
-
-    super.serviceInit(conf);
-  }
-
-  protected ContainerId getAMContainerId() throws BadClusterStateException {
-    return ContainerId.fromString(SliderUtils.mandatoryEnvVariable(
-        ApplicationConstants.Environment.CONTAINER_ID.name()));
-  }
-
-  protected Path getAppDir() {
-    return new Path(amArgs.getAppDefPath()).getParent();
-  }
-
-  protected ServiceScheduler createServiceScheduler(ServiceContext context)
-      throws IOException, YarnException {
-    return new ServiceScheduler(context);
-  }
-
-  protected void loadApplicationJson(ServiceContext context,
-      SliderFileSystem fs) throws IOException {
-    context.application = ServiceApiUtil
-        .loadApplicationFrom(fs, new Path(amArgs.getAppDefPath()));
-    LOG.info(context.application.toString());
-  }
-
-  @Override
-  protected void serviceStop() throws Exception {
-    LOG.info("Stopping app master");
-    super.serviceStop();
-  }
-
-  private void printSystemEnv() {
-    for (Map.Entry<String, String> envs : System.getenv().entrySet()) {
-      LOG.info("{} = {}", envs.getKey(), envs.getValue());
-    }
-  }
-
-  public static void main(String[] args) throws Exception {
-    Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
-    StringUtils.startupShutdownMessage(ServiceMaster.class, args, LOG);
-    amArgs = new SliderAMArgs(args);
-    amArgs.parse();
-    try {
-      ServiceMaster serviceMaster = new ServiceMaster("Service Master");
-      ShutdownHookManager.get()
-          .addShutdownHook(new CompositeServiceShutdownHook(serviceMaster), 30);
-      YarnConfiguration conf = new YarnConfiguration();
-      new GenericOptionsParser(conf, args);
-      serviceMaster.init(conf);
-      serviceMaster.start();
-    } catch (Throwable t) {
-      LOG.error("Error starting service master", t);
-      ExitUtil.terminate(1, "Error starting service master");
-    }
-  }
-}

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org