X-Original-To: apmail-hadoop-common-commits-archive@www.apache.org
Delivered-To: apmail-hadoop-common-commits-archive@www.apache.org
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
Precedence: bulk
Reply-To: common-dev@hadoop.apache.org
Delivered-To: mailing list common-commits@hadoop.apache.org
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: sjlee@apache.org
To: common-commits@hadoop.apache.org
Date: Tue, 13 Oct 2015 17:53:26 -0000
Message-Id: <0bd8a9597ab04dcdbfed7dc421b58a7c@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: [38/50] [abbrv] hadoop git commit: YARN-4102. Add a "skip existing table" mode for timeline schema creator (Li Lu via sjlee)

YARN-4102. Add a "skip existing table" mode for timeline schema creator (Li Lu via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/23fa8fa2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/23fa8fa2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/23fa8fa2

Branch: refs/heads/YARN-2928
Commit: 23fa8fa289f181131398f8875b79bd7b6aff5d5e
Parents: c51eceb
Author: Sangjin Lee
Authored: Fri Sep 11 09:46:13 2015 -0700
Committer: Sangjin Lee
Committed: Sat Oct 10 17:05:03 2015 -0700

----------------------------------------------------------------------
 hadoop-yarn-project/CHANGES.txt                 |  3 +
 .../storage/TimelineSchemaCreator.java          | 81 ++++++++++++++++----
 2 files changed, 69 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/23fa8fa2/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 43f615a..302b325 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -100,6 +100,9 @@ Branch YARN-2928: Timeline Server Next Generation: Phase 1
     YARN-3814. REST API implementation for getting raw entities in
     TimelineReader (Varun Saxena via sjlee)
 
+    YARN-4102. Add a "skip existing table" mode for timeline schema creator (Li
+    Lu via sjlee)
+
   IMPROVEMENTS
 
     YARN-3276. Code cleanup for timeline service API records. (Junping Du via

http://git-wip-us.apache.org/repos/asf/hadoop/blob/23fa8fa2/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
index 5120856..e7e51a7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
@@ -54,6 +54,11 @@ public class TimelineSchemaCreator {
   final static String NAME = TimelineSchemaCreator.class.getSimpleName();
   private static final Log LOG = LogFactory.getLog(TimelineSchemaCreator.class);
   private static final String PHOENIX_OPTION_SHORT = "p";
+  private static final String SKIP_EXISTING_TABLE_OPTION_SHORT = "s";
+  private static final String APP_TABLE_NAME_SHORT = "a";
+  private static final String APP_TO_FLOW_TABLE_NAME_SHORT = "a2f";
+  private static final String TTL_OPTION_SHORT = "m";
+  private static final String ENTITY_TABLE_NAME_SHORT = "e";
 
   public static void main(String[] args) throws Exception {
@@ -66,22 +71,25 @@ public class TimelineSchemaCreator {
     CommandLine commandLine = parseArgs(otherArgs);
 
     // Grab the entityTableName argument
-    String entityTableName = commandLine.getOptionValue("e");
+    String entityTableName
+        = commandLine.getOptionValue(ENTITY_TABLE_NAME_SHORT);
     if (StringUtils.isNotBlank(entityTableName)) {
       hbaseConf.set(EntityTable.TABLE_NAME_CONF_NAME, entityTableName);
     }
-    String entityTableTTLMetrics = commandLine.getOptionValue("m");
+    String entityTableTTLMetrics = commandLine.getOptionValue(TTL_OPTION_SHORT);
     if (StringUtils.isNotBlank(entityTableTTLMetrics)) {
       int metricsTTL = Integer.parseInt(entityTableTTLMetrics);
       new EntityTable().setMetricsTTL(metricsTTL, hbaseConf);
     }
     // Grab the appToflowTableName argument
-    String appToflowTableName = commandLine.getOptionValue("a2f");
+    String appToflowTableName = commandLine.getOptionValue(
+        APP_TO_FLOW_TABLE_NAME_SHORT);
     if (StringUtils.isNotBlank(appToflowTableName)) {
       hbaseConf.set(AppToFlowTable.TABLE_NAME_CONF_NAME, appToflowTableName);
     }
     // Grab the applicationTableName argument
-    String applicationTableName = commandLine.getOptionValue("a");
+    String applicationTableName = commandLine.getOptionValue(
+        APP_TABLE_NAME_SHORT);
     if (StringUtils.isNotBlank(applicationTableName)) {
       hbaseConf.set(ApplicationTable.TABLE_NAME_CONF_NAME,
           applicationTableName);
@@ -89,7 +97,13 @@ public class TimelineSchemaCreator {
 
     List<Exception> exceptions = new ArrayList<>();
     try {
-      createAllTables(hbaseConf);
+      boolean skipExisting
+          = commandLine.hasOption(SKIP_EXISTING_TABLE_OPTION_SHORT);
+      if (skipExisting) {
+        LOG.info("Will skip existing tables and continue on htable creation "
+            + "exceptions!");
+      }
+      createAllTables(hbaseConf, skipExisting);
       LOG.info("Successfully created HBase schema. ");
"); } catch (IOException e) { LOG.error("Error in creating hbase tables: " + e.getMessage()); @@ -135,26 +149,39 @@ public class TimelineSchemaCreator { Options options = new Options(); // Input - Option o = new Option("e", "entityTableName", true, "entity table name"); + Option o = new Option(ENTITY_TABLE_NAME_SHORT, "entityTableName", true, + "entity table name"); o.setArgName("entityTableName"); o.setRequired(false); options.addOption(o); - o = new Option("m", "metricsTTL", true, "TTL for metrics column family"); + o = new Option(TTL_OPTION_SHORT, "metricsTTL", true, + "TTL for metrics column family"); o.setArgName("metricsTTL"); o.setRequired(false); options.addOption(o); - o = new Option("a2f", "appToflowTableName", true, "app to flow table name"); + o = new Option(APP_TO_FLOW_TABLE_NAME_SHORT, "appToflowTableName", true, + "app to flow table name"); o.setArgName("appToflowTableName"); - o = new Option("a", "applicationTableName", true, "application table name"); + o.setRequired(false); + options.addOption(o); + + o = new Option(APP_TABLE_NAME_SHORT, "applicationTableName", true, + "application table name"); o.setArgName("applicationTableName"); o.setRequired(false); options.addOption(o); + // Options without an argument + // No need to set arg name since we do not need an argument here o = new Option(PHOENIX_OPTION_SHORT, "usePhoenix", false, "create Phoenix offline aggregation tables"); - // No need to set arg name since we do not need an argument here + o.setRequired(false); + options.addOption(o); + + o = new Option(SKIP_EXISTING_TABLE_OPTION_SHORT, "skipExistingTable", + false, "skip existing Hbase tables and continue to create new tables"); o.setRequired(false); options.addOption(o); @@ -172,8 +199,8 @@ public class TimelineSchemaCreator { return commandLine; } - private static void createAllTables(Configuration hbaseConf) - throws IOException { + private static void createAllTables(Configuration hbaseConf, + boolean skipExisting) throws IOException { Connection conn = null; try { @@ -182,9 +209,33 @@ public class TimelineSchemaCreator { if (admin == null) { throw new IOException("Cannot create table since admin is null"); } - new EntityTable().createTable(admin, hbaseConf); - new AppToFlowTable().createTable(admin, hbaseConf); - new ApplicationTable().createTable(admin, hbaseConf); + try { + new EntityTable().createTable(admin, hbaseConf); + } catch (IOException e) { + if (skipExisting) { + LOG.warn("Skip and continue on: " + e.getMessage()); + } else { + throw e; + } + } + try { + new AppToFlowTable().createTable(admin, hbaseConf); + } catch (IOException e) { + if (skipExisting) { + LOG.warn("Skip and continue on: " + e.getMessage()); + } else { + throw e; + } + } + try { + new ApplicationTable().createTable(admin, hbaseConf); + } catch (IOException e) { + if (skipExisting) { + LOG.warn("Skip and continue on: " + e.getMessage()); + } else { + throw e; + } + } } finally { if (conn != null) { conn.close();