From: zzhang@apache.org
To: commits@helix.incubator.apache.org
Reply-To: dev@helix.incubator.apache.org
Date: Wed, 21 Aug 2013 20:43:37 -0000
Subject: [24/51] [partial] [HELIX-198] Unify helix code style, rb=13710

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/StateModelConfigGenerator.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/StateModelConfigGenerator.java b/helix-core/src/main/java/org/apache/helix/tools/StateModelConfigGenerator.java index 705f5bd..508e447 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/StateModelConfigGenerator.java +++ b/helix-core/src/main/java/org/apache/helix/tools/StateModelConfigGenerator.java @@ -31,13 +31,10 @@ import org.apache.helix.model.Transition; import org.apache.helix.model.StateModelDefinition.StateModelDefinitionProperty; import org.apache.helix.model.builder.StateTransitionTableBuilder; - // TODO refactor to use StateModelDefinition.Builder -public class StateModelConfigGenerator -{ +public class StateModelConfigGenerator { - public static void main(String[] args) - { + public static void main(String[] args) { ZNRecordSerializer serializer = new ZNRecordSerializer(); StateModelConfigGenerator generator = new StateModelConfigGenerator(); System.out.println(new String(serializer.serialize(generator.generateConfigForMasterSlave()))); @@ -50,62 +47,48 @@ public class StateModelConfigGenerator * to last state */ - public static ZNRecord generateConfigForStorageSchemata() - { + public static ZNRecord generateConfigForStorageSchemata() { ZNRecord record = new ZNRecord("STORAGE_DEFAULT_SM_SCHEMATA"); - record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), -
"OFFLINE"); + record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), "OFFLINE"); List statePriorityList = new ArrayList(); statePriorityList.add("MASTER"); statePriorityList.add("OFFLINE"); statePriorityList.add("DROPPED"); statePriorityList.add("ERROR"); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), - statePriorityList); - for (String state : statePriorityList) - { + statePriorityList); + for (String state : statePriorityList) { String key = state + ".meta"; Map metadata = new HashMap(); - if (state.equals("MASTER")) - { + if (state.equals("MASTER")) { metadata.put("count", "N"); record.setMapField(key, metadata); - } - else if (state.equals("OFFLINE")) - { + } else if (state.equals("OFFLINE")) { metadata.put("count", "-1"); record.setMapField(key, metadata); - } - else if (state.equals("DROPPED")) - { + } else if (state.equals("DROPPED")) { metadata.put("count", "-1"); record.setMapField(key, metadata); - } - else if (state.equals("ERROR")) - { + } else if (state.equals("ERROR")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } } - for (String state : statePriorityList) - { + for (String state : statePriorityList) { String key = state + ".next"; - if (state.equals("MASTER")) - { + if (state.equals("MASTER")) { Map metadata = new HashMap(); metadata.put("OFFLINE", "OFFLINE"); metadata.put("DROPPED", "OFFLINE"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { Map metadata = new HashMap(); metadata.put("MASTER", "MASTER"); metadata.put("DROPPED", "DROPPED"); record.setMapField(key, metadata); } - if (state.equals("ERROR")) - { + if (state.equals("ERROR")) { Map metadata = new HashMap(); metadata.put("OFFLINE", "OFFLINE"); record.setMapField(key, metadata); @@ -115,15 +98,13 @@ public class StateModelConfigGenerator stateTransitionPriorityList.add("MASTER-OFFLINE"); stateTransitionPriorityList.add("OFFLINE-MASTER"); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), - stateTransitionPriorityList); + stateTransitionPriorityList); return record; } - public static ZNRecord generateConfigForMasterSlave() - { + public static ZNRecord generateConfigForMasterSlave() { ZNRecord record = new ZNRecord("MasterSlave"); - record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), - "OFFLINE"); + record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), "OFFLINE"); List statePriorityList = new ArrayList(); statePriorityList.add("MASTER"); statePriorityList.add("SLAVE"); @@ -131,66 +112,48 @@ public class StateModelConfigGenerator statePriorityList.add("DROPPED"); statePriorityList.add("ERROR"); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), - statePriorityList); - for (String state : statePriorityList) - { + statePriorityList); + for (String state : statePriorityList) { String key = state + ".meta"; Map metadata = new HashMap(); - if (state.equals("MASTER")) - { + if (state.equals("MASTER")) { metadata.put("count", "1"); record.setMapField(key, metadata); - } - else if (state.equals("SLAVE")) - { + } else if (state.equals("SLAVE")) { metadata.put("count", "R"); record.setMapField(key, metadata); - } - else if (state.equals("OFFLINE")) - { + } else if (state.equals("OFFLINE")) { metadata.put("count", "-1"); record.setMapField(key, metadata); - } - else if (state.equals("DROPPED")) - { + } else if (state.equals("DROPPED")) { metadata.put("count", "-1"); 
record.setMapField(key, metadata); - } - else if (state.equals("ERROR")) - { + } else if (state.equals("ERROR")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } } - for (String state : statePriorityList) - { + for (String state : statePriorityList) { String key = state + ".next"; - if (state.equals("MASTER")) - { + if (state.equals("MASTER")) { Map metadata = new HashMap(); metadata.put("SLAVE", "SLAVE"); metadata.put("OFFLINE", "SLAVE"); metadata.put("DROPPED", "SLAVE"); record.setMapField(key, metadata); - } - else if (state.equals("SLAVE")) - { + } else if (state.equals("SLAVE")) { Map metadata = new HashMap(); metadata.put("MASTER", "MASTER"); metadata.put("OFFLINE", "OFFLINE"); metadata.put("DROPPED", "OFFLINE"); record.setMapField(key, metadata); - } - else if (state.equals("OFFLINE")) - { + } else if (state.equals("OFFLINE")) { Map metadata = new HashMap(); metadata.put("SLAVE", "SLAVE"); metadata.put("MASTER", "SLAVE"); metadata.put("DROPPED", "DROPPED"); record.setMapField(key, metadata); - } - else if (state.equals("ERROR")) - { + } else if (state.equals("ERROR")) { Map metadata = new HashMap(); metadata.put("OFFLINE", "OFFLINE"); record.setMapField(key, metadata); @@ -203,72 +166,61 @@ public class StateModelConfigGenerator stateTransitionPriorityList.add("SLAVE-OFFLINE"); stateTransitionPriorityList.add("OFFLINE-DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), - stateTransitionPriorityList); + stateTransitionPriorityList); return record; // ZNRecordSerializer serializer = new ZNRecordSerializer(); // System.out.println(new String(serializer.serialize(record))); } - public static ZNRecord generateConfigForLeaderStandby() - { + public static ZNRecord generateConfigForLeaderStandby() { ZNRecord record = new ZNRecord("LeaderStandby"); - record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), - "OFFLINE"); + record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), "OFFLINE"); List statePriorityList = new ArrayList(); statePriorityList.add("LEADER"); statePriorityList.add("STANDBY"); statePriorityList.add("OFFLINE"); statePriorityList.add("DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), - statePriorityList); - for (String state : statePriorityList) - { + statePriorityList); + for (String state : statePriorityList) { String key = state + ".meta"; Map metadata = new HashMap(); - if (state.equals("LEADER")) - { + if (state.equals("LEADER")) { metadata.put("count", "1"); record.setMapField(key, metadata); } - if (state.equals("STANDBY")) - { + if (state.equals("STANDBY")) { metadata.put("count", "R"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } - if (state.equals("DROPPED")) - { + if (state.equals("DROPPED")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } } - for (String state : statePriorityList) - { + for (String state : statePriorityList) { String key = state + ".next"; - if (state.equals("LEADER")) - { + if (state.equals("LEADER")) { Map metadata = new HashMap(); metadata.put("STANDBY", "STANDBY"); metadata.put("OFFLINE", "STANDBY"); metadata.put("DROPPED", "STANDBY"); record.setMapField(key, metadata); } - if (state.equals("STANDBY")) - { + if (state.equals("STANDBY")) { Map metadata = new HashMap(); metadata.put("LEADER", "LEADER"); metadata.put("OFFLINE", "OFFLINE"); 
metadata.put("DROPPED", "OFFLINE"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { Map metadata = new HashMap(); metadata.put("STANDBY", "STANDBY"); metadata.put("LEADER", "STANDBY"); @@ -285,56 +237,47 @@ public class StateModelConfigGenerator stateTransitionPriorityList.add("OFFLINE-DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), - stateTransitionPriorityList); + stateTransitionPriorityList); return record; // ZNRecordSerializer serializer = new ZNRecordSerializer(); // System.out.println(new String(serializer.serialize(record))); } - public static ZNRecord generateConfigForOnlineOffline() - { + public static ZNRecord generateConfigForOnlineOffline() { ZNRecord record = new ZNRecord("OnlineOffline"); - record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), - "OFFLINE"); + record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), "OFFLINE"); List statePriorityList = new ArrayList(); statePriorityList.add("ONLINE"); statePriorityList.add("OFFLINE"); statePriorityList.add("DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), - statePriorityList); - for (String state : statePriorityList) - { + statePriorityList); + for (String state : statePriorityList) { String key = state + ".meta"; Map metadata = new HashMap(); - if (state.equals("ONLINE")) - { + if (state.equals("ONLINE")) { metadata.put("count", "R"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } - if (state.equals("DROPPED")) - { + if (state.equals("DROPPED")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } } - for (String state : statePriorityList) - { + for (String state : statePriorityList) { String key = state + ".next"; - if (state.equals("ONLINE")) - { + if (state.equals("ONLINE")) { Map metadata = new HashMap(); metadata.put("OFFLINE", "OFFLINE"); metadata.put("DROPPED", "OFFLINE"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { Map metadata = new HashMap(); metadata.put("ONLINE", "ONLINE"); metadata.put("DROPPED", "DROPPED"); @@ -347,39 +290,33 @@ public class StateModelConfigGenerator stateTransitionPriorityList.add("OFFLINE-DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), - stateTransitionPriorityList); + stateTransitionPriorityList); return record; // ZNRecordSerializer serializer = new ZNRecordSerializer(); // System.out.println(new String(serializer.serialize(record))); } - - public static ZNRecord generateConfigForScheduledTaskQueue() - { + + public static ZNRecord generateConfigForScheduledTaskQueue() { ZNRecord record = new ZNRecord(DefaultSchedulerMessageHandlerFactory.SCHEDULER_TASK_QUEUE); - record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), - "OFFLINE"); + record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), "OFFLINE"); List statePriorityList = new ArrayList(); statePriorityList.add("COMPLETED"); statePriorityList.add("OFFLINE"); statePriorityList.add("DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), - statePriorityList); - for (String state : statePriorityList) - { + statePriorityList); + for (String state : statePriorityList) { String key = state + ".meta"; Map metadata = new 
HashMap(); - if (state.equals("COMPLETED")) - { + if (state.equals("COMPLETED")) { metadata.put("count", "1"); record.setMapField(key, metadata); } - if (state.equals("OFFLINE")) - { + if (state.equals("OFFLINE")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } - if (state.equals("DROPPED")) - { + if (state.equals("DROPPED")) { metadata.put("count", "-1"); record.setMapField(key, metadata); } @@ -398,8 +335,7 @@ public class StateModelConfigGenerator StateTransitionTableBuilder builder = new StateTransitionTableBuilder(); Map> next = builder.buildTransitionTable(states, transitions); - for (String state : statePriorityList) - { + for (String state : statePriorityList) { String key = state + ".next"; record.setMapField(key, next.get(state)); } @@ -409,7 +345,7 @@ public class StateModelConfigGenerator stateTransitionPriorityList.add("COMPLETED-DROPPED"); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), - stateTransitionPriorityList); + stateTransitionPriorityList); return record; } } http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/TestCommand.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/TestCommand.java b/helix-core/src/main/java/org/apache/helix/tools/TestCommand.java index 2d5e84d..edf5ff9 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/TestCommand.java +++ b/helix-core/src/main/java/org/apache/helix/tools/TestCommand.java @@ -21,23 +21,19 @@ package org.apache.helix.tools; import org.apache.helix.HelixManager; -public class TestCommand -{ - public enum CommandType - { +public class TestCommand { + public enum CommandType { MODIFY, VERIFY, START, STOP } - public static class NodeOpArg - { + public static class NodeOpArg { public HelixManager _manager; public Thread _thread; - public NodeOpArg(HelixManager manager, Thread thread) - { + public NodeOpArg(HelixManager manager, Thread thread) { _manager = manager; _thread = thread; } @@ -52,56 +48,46 @@ public class TestCommand public long _finishTimestamp; /** - * * @param type * @param arg */ - public TestCommand(CommandType type, ZnodeOpArg arg) - { + public TestCommand(CommandType type, ZnodeOpArg arg) { this(type, new TestTrigger(), arg); } /** - * * @param type * @param trigger * @param arg */ - public TestCommand(CommandType type, TestTrigger trigger, ZnodeOpArg arg) - { + public TestCommand(CommandType type, TestTrigger trigger, ZnodeOpArg arg) { _commandType = type; _trigger = trigger; _znodeOpArg = arg; } /** - * * @param type * @param trigger * @param arg */ - public TestCommand(CommandType type, TestTrigger trigger, NodeOpArg arg) - { + public TestCommand(CommandType type, TestTrigger trigger, NodeOpArg arg) { _commandType = type; _trigger = trigger; _nodeOpArg = arg; } @Override - public String toString() - { + public String toString() { String ret = super.toString().substring(super.toString().lastIndexOf(".") + 1) + " "; - if (_finishTimestamp > 0) - { - ret += "FINISH@" + _finishTimestamp + "-START@" + _startTimestamp - + "=" + (_finishTimestamp - _startTimestamp) + "ms "; + if (_finishTimestamp > 0) { + ret += + "FINISH@" + _finishTimestamp + "-START@" + _startTimestamp + "=" + + (_finishTimestamp - _startTimestamp) + "ms "; } - if (_commandType == CommandType.MODIFY || _commandType == CommandType.VERIFY) - { + if (_commandType == CommandType.MODIFY || _commandType == CommandType.VERIFY) { ret += 
_commandType.toString() + "|" + _trigger.toString() + "|" + _znodeOpArg.toString(); - } - else if (_commandType == CommandType.START || _commandType == CommandType.STOP) - { + } else if (_commandType == CommandType.START || _commandType == CommandType.STOP) { ret += _commandType.toString() + "|" + _trigger.toString() + "|" + _nodeOpArg.toString(); } http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/TestExecutor.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/TestExecutor.java b/helix-core/src/main/java/org/apache/helix/tools/TestExecutor.java index 137a7b6..724b70c 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/TestExecutor.java +++ b/helix-core/src/main/java/org/apache/helix/tools/TestExecutor.java @@ -42,131 +42,97 @@ import org.apache.helix.tools.TestCommand.CommandType; import org.apache.log4j.Logger; import org.apache.zookeeper.data.Stat; - /** * a test is structured logically as a list of commands a command has three parts: COMMAND * | TRIGGER | ARG'S COMMAND could be: modify, verify, start, stop - * * TRIGGER is optional and consists of start-time, timeout, and expect-value which means * the COMMAND is triggered between [start-time, start-time + timeout] and is triggered * when the value in concern equals to expect-value - * * ARG's format depends on COMMAND if COMMAND is modify/verify, arg is in form of: * in which key is k1 for SIMPLE, k1|index for LIST, and k1|k2 for MAP field * if COMMAND is start/stop, arg is a thread handler - * - * */ -public class TestExecutor -{ +public class TestExecutor { /** * SIMPLE: simple field change LIST: list field change MAP: map field change ZNODE: * entire znode change */ - public enum ZnodePropertyType - { - SIMPLE, LIST, MAP, ZNODE + public enum ZnodePropertyType { + SIMPLE, + LIST, + MAP, + ZNODE } - private enum ZnodeModValueType - { - INVALID, SINGLE_VALUE, LIST_VALUE, MAP_VALUE, ZNODE_VALUE + private enum ZnodeModValueType { + INVALID, + SINGLE_VALUE, + LIST_VALUE, + MAP_VALUE, + ZNODE_VALUE } - private static Logger logger = - Logger.getLogger(TestExecutor.class); - private static final long SLEEP_TIME = 500; // in - // ms + private static Logger logger = Logger.getLogger(TestExecutor.class); + private static final long SLEEP_TIME = 500; // in + // ms - private final static PropertyJsonComparator STRING_COMPARATOR = - new PropertyJsonComparator(String.class); + private final static PropertyJsonComparator STRING_COMPARATOR = + new PropertyJsonComparator(String.class); private final static PropertyJsonSerializer ZNRECORD_SERIALIZER = - new PropertyJsonSerializer(ZNRecord.class); + new PropertyJsonSerializer(ZNRecord.class); - private static ZnodeModValueType getValueType(ZnodePropertyType type, String key) - { + private static ZnodeModValueType getValueType(ZnodePropertyType type, String key) { ZnodeModValueType valueType = ZnodeModValueType.INVALID; - switch (type) - { + switch (type) { case SIMPLE: - if (key == null) - { + if (key == null) { logger.warn("invalid key for simple field: key is null"); - } - else - { + } else { String keyParts[] = key.split("/"); - if (keyParts.length != 1) - { - logger.warn("invalid key for simple field: " + key - + ", expect 1 part: key1 (no slash)"); - } - else - { + if (keyParts.length != 1) { + logger.warn("invalid key for simple field: " + key + ", expect 1 part: key1 (no slash)"); + } else { valueType = ZnodeModValueType.SINGLE_VALUE; 
} } break; case LIST: - if (key == null) - { + if (key == null) { logger.warn("invalid key for simple field: key is null"); - } - else - { + } else { String keyParts[] = key.split("/"); - if (keyParts.length < 1 || keyParts.length > 2) - { + if (keyParts.length < 1 || keyParts.length > 2) { logger.warn("invalid key for list field: " + key + ", expect 1 or 2 parts: key1 or key1/index)"); - } - else if (keyParts.length == 1) - { + } else if (keyParts.length == 1) { valueType = ZnodeModValueType.LIST_VALUE; - } - else - { - try - { + } else { + try { int index = Integer.parseInt(keyParts[1]); - if (index < 0) - { + if (index < 0) { logger.warn("invalid key for list field: " + key + ", index < 0"); - } - else - { + } else { valueType = ZnodeModValueType.SINGLE_VALUE; } - } - catch (NumberFormatException e) - { - logger.warn("invalid key for list field: " + key - + ", part-2 is NOT an integer"); + } catch (NumberFormatException e) { + logger.warn("invalid key for list field: " + key + ", part-2 is NOT an integer"); } } } break; case MAP: - if (key == null) - { + if (key == null) { logger.warn("invalid key for simple field: key is null"); - } - else - { + } else { String keyParts[] = key.split("/"); - if (keyParts.length < 1 || keyParts.length > 2) - { + if (keyParts.length < 1 || keyParts.length > 2) { logger.warn("invalid key for map field: " + key + ", expect 1 or 2 parts: key1 or key1/key2)"); - } - else if (keyParts.length == 1) - { + } else if (keyParts.length == 1) { valueType = ZnodeModValueType.MAP_VALUE; - } - else - { + } else { valueType = ZnodeModValueType.SINGLE_VALUE; } } @@ -179,27 +145,22 @@ public class TestExecutor return valueType; } - private static String getSingleValue(ZNRecord record, ZnodePropertyType type, String key) - { - if (record == null || key == null) - { + private static String getSingleValue(ZNRecord record, ZnodePropertyType type, String key) { + if (record == null || key == null) { return null; } String value = null; String keyParts[] = key.split("/"); - switch (type) - { + switch (type) { case SIMPLE: value = record.getSimpleField(key); break; case LIST: List list = record.getListField(keyParts[0]); - if (list == null) - { - logger.warn("invalid key for list field: " + key - + ", map for key part-1 doesn't exist"); + if (list == null) { + logger.warn("invalid key for list field: " + key + ", map for key part-1 doesn't exist"); return null; } int idx = Integer.parseInt(keyParts[1]); @@ -207,10 +168,8 @@ public class TestExecutor break; case MAP: Map map = record.getMapField(keyParts[0]); - if (map == null) - { - logger.warn("invalid key for map field: " + key - + ", map for key part-1 doesn't exist"); + if (map == null) { + logger.warn("invalid key for map field: " + key + ", map for key part-1 doesn't exist"); return null; } value = map.get(keyParts[1]); @@ -222,106 +181,77 @@ public class TestExecutor return value; } - private static List getListValue(ZNRecord record, String key) - { - if (record == null) - { + private static List getListValue(ZNRecord record, String key) { + if (record == null) { return null; } return record.getListField(key); } - private static Map getMapValue(ZNRecord record, String key) - { + private static Map getMapValue(ZNRecord record, String key) { return record.getMapField(key); } // comparator's for single/list/map-value - private static boolean compareSingleValue(String actual, - String expect, - String key, - ZNRecord diff) - { + private static boolean compareSingleValue(String actual, String expect, String key, ZNRecord diff) 
{ boolean ret = (STRING_COMPARATOR.compare(actual, expect) == 0); - if (diff != null) - { + if (diff != null) { diff.setSimpleField(key + "/expect", expect); diff.setSimpleField(key + "/actual", actual); } return ret; } - private static boolean compareListValue(List actualList, - List expectList, - String key, - ZNRecord diff) - { + private static boolean compareListValue(List actualList, List expectList, + String key, ZNRecord diff) { boolean ret = true; - if (actualList == null && expectList == null) - { + if (actualList == null && expectList == null) { ret = true; - } - else if (actualList == null && expectList != null) - { + } else if (actualList == null && expectList != null) { ret = false; - if (diff != null) - { + if (diff != null) { diff.setListField(key + "/expect", expectList); } - } - else if (actualList != null && expectList == null) - { + } else if (actualList != null && expectList == null) { ret = false; - if (diff != null) - { + if (diff != null) { diff.setListField(key + "/actual", actualList); } - } - else - { + } else { Iterator itrActual = actualList.iterator(); Iterator itrExpect = expectList.iterator(); - if (diff != null && diff.getListField(key + "/expect") == null) - { + if (diff != null && diff.getListField(key + "/expect") == null) { diff.setListField(key + "/expect", new ArrayList()); } - if (diff != null && diff.getListField(key + "/actual") == null) - { + if (diff != null && diff.getListField(key + "/actual") == null) { diff.setListField(key + "/actual", new ArrayList()); } - while (itrActual.hasNext() && itrExpect.hasNext()) - { + while (itrActual.hasNext() && itrExpect.hasNext()) { String actual = itrActual.next(); String expect = itrExpect.next(); - if (STRING_COMPARATOR.compare(actual, expect) != 0) - { + if (STRING_COMPARATOR.compare(actual, expect) != 0) { ret = false; - if (diff != null) - { + if (diff != null) { diff.getListField(key + "/expect").add(expect); diff.getListField(key + "/actual").add(actual); } } } - while (itrActual.hasNext()) - { + while (itrActual.hasNext()) { String actual = itrActual.next(); - if (diff != null) - { + if (diff != null) { diff.getListField(key + "/actual").add(actual); } } - while (itrExpect.hasNext()) - { + while (itrExpect.hasNext()) { String expect = itrExpect.next(); - if (diff != null) - { + if (diff != null) { diff.getListField(key + "/expect").add(expect); } } @@ -329,64 +259,43 @@ public class TestExecutor return ret; } - private static void setMapField(ZNRecord record, String key1, String key2, String value) - { - if (record.getMapField(key1) == null) - { + private static void setMapField(ZNRecord record, String key1, String key2, String value) { + if (record.getMapField(key1) == null) { record.setMapField(key1, new TreeMap()); } record.getMapField(key1).put(key2, value); } private static boolean compareMapValue(Map actualMap, - Map expectMap, - String mapKey, - ZNRecord diff) - { + Map expectMap, String mapKey, ZNRecord diff) { boolean ret = true; - if (actualMap == null && expectMap == null) - { + if (actualMap == null && expectMap == null) { ret = true; - } - else if (actualMap == null && expectMap != null) - { + } else if (actualMap == null && expectMap != null) { ret = false; - if (diff != null) - { + if (diff != null) { diff.setMapField(mapKey + "/expect", expectMap); } - } - else if (actualMap != null && expectMap == null) - { + } else if (actualMap != null && expectMap == null) { ret = false; - if (diff != null) - { + if (diff != null) { diff.setMapField(mapKey + "/actual", actualMap); } - } - else - { 
- for (String key : actualMap.keySet()) - { + } else { + for (String key : actualMap.keySet()) { String actual = actualMap.get(key); - if (!expectMap.containsKey(key)) - { + if (!expectMap.containsKey(key)) { ret = false; - if (diff != null) - { + if (diff != null) { setMapField(diff, mapKey + "/actual", key, actual); } - } - else - { + } else { String expect = expectMap.get(key); - if (STRING_COMPARATOR.compare(actual, expect) != 0) - { + if (STRING_COMPARATOR.compare(actual, expect) != 0) { ret = false; - if (diff != null) - { + if (diff != null) { setMapField(diff, mapKey + "/actual", key, actual); setMapField(diff, mapKey + "/expect", key, expect); } @@ -394,26 +303,19 @@ public class TestExecutor } } - for (String key : expectMap.keySet()) - { + for (String key : expectMap.keySet()) { String expect = expectMap.get(key); - if (!actualMap.containsKey(key)) - { + if (!actualMap.containsKey(key)) { ret = false; - if (diff != null) - { + if (diff != null) { setMapField(diff, mapKey + "/expect", key, expect); } - } - else - { + } else { String actual = actualMap.get(key); - if (STRING_COMPARATOR.compare(actual, expect) != 0) - { + if (STRING_COMPARATOR.compare(actual, expect) != 0) { ret = false; - if (diff != null) - { + if (diff != null) { setMapField(diff, mapKey + "/actual", key, actual); setMapField(diff, mapKey + "/expect", key, expect); } @@ -424,98 +326,66 @@ public class TestExecutor return ret; } - private static void setZNRecord(ZNRecord diff, ZNRecord record, String keySuffix) - { - if (diff == null || record == null) - { + private static void setZNRecord(ZNRecord diff, ZNRecord record, String keySuffix) { + if (diff == null || record == null) { return; } - for (String key : record.getSimpleFields().keySet()) - { + for (String key : record.getSimpleFields().keySet()) { diff.setSimpleField(key + "/" + keySuffix, record.getSimpleField(key)); } - for (String key : record.getListFields().keySet()) - { + for (String key : record.getListFields().keySet()) { diff.setListField(key + "/" + keySuffix, record.getListField(key)); } - for (String key : record.getMapFields().keySet()) - { + for (String key : record.getMapFields().keySet()) { diff.setMapField(key + "/" + keySuffix, record.getMapField(key)); } } - private static boolean compareZnodeValue(ZNRecord actual, ZNRecord expect, ZNRecord diff) - { + private static boolean compareZnodeValue(ZNRecord actual, ZNRecord expect, ZNRecord diff) { boolean ret = true; - if (actual == null && expect == null) - { + if (actual == null && expect == null) { ret = true; - } - else if (actual == null && expect != null) - { + } else if (actual == null && expect != null) { ret = false; - if (diff != null) - { + if (diff != null) { setZNRecord(diff, expect, "expect"); } - } - else if (actual != null && expect == null) - { + } else if (actual != null && expect == null) { ret = false; - if (diff != null) - { + if (diff != null) { setZNRecord(diff, actual, "actual"); } - } - else - { - for (String key : actual.getSimpleFields().keySet()) - { - if (compareSingleValue(actual.getSimpleField(key), - expect.getSimpleField(key), - key, - diff) == false) - { + } else { + for (String key : actual.getSimpleFields().keySet()) { + if (compareSingleValue(actual.getSimpleField(key), expect.getSimpleField(key), key, diff) == false) { ret = false; } } - for (String key : expect.getMapFields().keySet()) - { - if (!actual.getMapFields().containsKey(key)) - { - if (diff != null) - { + for (String key : expect.getMapFields().keySet()) { + if 
(!actual.getMapFields().containsKey(key)) { + if (diff != null) { ret = false; diff.setMapField(key + "/expect", expect.getMapField(key)); } - } - else - { - if (compareMapValue(actual.getMapField(key), expect.getMapField(key), key, diff) == false) - { + } else { + if (compareMapValue(actual.getMapField(key), expect.getMapField(key), key, diff) == false) { ret = false; } } } - for (String key : actual.getMapFields().keySet()) - { - if (!expect.getMapFields().containsKey(key)) - { - if (diff != null) - { + for (String key : actual.getMapFields().keySet()) { + if (!expect.getMapFields().containsKey(key)) { + if (diff != null) { ret = false; diff.setMapField(key + "/actual", actual.getMapField(key)); } - } - else - { - if (compareMapValue(actual.getMapField(key), expect.getMapField(key), key, diff) == false) - { + } else { + if (compareMapValue(actual.getMapField(key), expect.getMapField(key), key, diff) == false) { ret = false; } } @@ -524,33 +394,25 @@ public class TestExecutor return ret; } - private static void resetZNRecord(ZNRecord record) - { - if (record != null) - { + private static void resetZNRecord(ZNRecord record) { + if (record != null) { record.getSimpleFields().clear(); record.getListFields().clear(); record.getMapFields().clear(); } } - private static boolean isValueExpected(ZNRecord current, - ZnodePropertyType type, - String key, - ZnodeValue expect, - ZNRecord diff) - { + private static boolean isValueExpected(ZNRecord current, ZnodePropertyType type, String key, + ZnodeValue expect, ZNRecord diff) { // expect value = null means not expect any value - if (expect == null) - { + if (expect == null) { return true; } boolean result = false; resetZNRecord(diff); ZnodeModValueType valueType = getValueType(type, key); - switch (valueType) - { + switch (valueType) { case SINGLE_VALUE: String singleValue = getSingleValue(current, type, key); result = compareSingleValue(singleValue, expect._singleValue, key, diff); @@ -574,24 +436,18 @@ public class TestExecutor return result; } - private static void setSingleValue(ZNRecord record, - ZnodePropertyType type, - String key, - String value) - { + private static void setSingleValue(ZNRecord record, ZnodePropertyType type, String key, + String value) { String keyParts[] = key.split("/"); - switch (type) - { + switch (type) { case SIMPLE: record.setSimpleField(key, value); break; case LIST: List list = record.getListField(keyParts[0]); - if (list == null) - { - logger.warn("invalid key for list field: " + key - + ", value for key part-1 doesn't exist"); + if (list == null) { + logger.warn("invalid key for list field: " + key + ", value for key part-1 doesn't exist"); return; } int idx = Integer.parseInt(keyParts[1]); @@ -600,10 +456,8 @@ public class TestExecutor break; case MAP: Map map = record.getMapField(keyParts[0]); - if (map == null) - { - logger.warn("invalid key for map field: " + key - + ", value for key part-1 doesn't exist"); + if (map == null) { + logger.warn("invalid key for map field: " + key + ", value for key part-1 doesn't exist"); return; } map.put(keyParts[1], value); @@ -613,37 +467,28 @@ public class TestExecutor } } - private static void setListValue(ZNRecord record, String key, List value) - { + private static void setListValue(ZNRecord record, String key, List value) { record.setListField(key, value); } - private static void setMapValue(ZNRecord record, String key, Map value) - { + private static void setMapValue(ZNRecord record, String key, Map value) { record.setMapField(key, value); } - private static void 
removeSingleValue(ZNRecord record, - ZnodePropertyType type, - String key) - { - if (record == null) - { + private static void removeSingleValue(ZNRecord record, ZnodePropertyType type, String key) { + if (record == null) { return; } String keyParts[] = key.split("/"); - switch (type) - { + switch (type) { case SIMPLE: record.getSimpleFields().remove(key); break; case LIST: List list = record.getListField(keyParts[0]); - if (list == null) - { - logger.warn("invalid key for list field: " + key - + ", value for key part-1 doesn't exist"); + if (list == null) { + logger.warn("invalid key for list field: " + key + ", value for key part-1 doesn't exist"); return; } int idx = Integer.parseInt(keyParts[1]); @@ -651,10 +496,8 @@ public class TestExecutor break; case MAP: Map map = record.getMapField(keyParts[0]); - if (map == null) - { - logger.warn("invalid key for map field: " + key - + ", value for key part-1 doesn't exist"); + if (map == null) { + logger.warn("invalid key for map field: " + key + ", value for key part-1 doesn't exist"); return; } map.remove(keyParts[1]); @@ -664,35 +507,25 @@ public class TestExecutor } } - private static void removeListValue(ZNRecord record, String key) - { - if (record == null || record.getListFields() == null) - { + private static void removeListValue(ZNRecord record, String key) { + if (record == null || record.getListFields() == null) { record.getListFields().remove(key); } } - private static void removeMapValue(ZNRecord record, String key) - { + private static void removeMapValue(ZNRecord record, String key) { record.getMapFields().remove(key); } - private static boolean executeVerifier(ZNRecord actual, - TestCommand command, - ZNRecord diff) - { + private static boolean executeVerifier(ZNRecord actual, TestCommand command, ZNRecord diff) { final ZnodeOpArg arg = command._znodeOpArg; final ZnodeValue expectValue = command._trigger._expectValue; - boolean result = - isValueExpected(actual, arg._propertyType, arg._key, expectValue, diff); + boolean result = isValueExpected(actual, arg._propertyType, arg._key, expectValue, diff); String operation = arg._operation; - if (operation.equals("!=")) - { + if (operation.equals("!=")) { result = !result; - } - else if (!operation.equals("==")) - { + } else if (!operation.equals("==")) { logger.warn("fail to execute (unsupport operation=" + operation + "):" + operation); result = false; } @@ -700,11 +533,8 @@ public class TestExecutor return result; } - private static boolean compareAndSetZnode(ZnodeValue expect, - ZnodeOpArg arg, - ZkClient zkClient, - ZNRecord diff) - { + private static boolean compareAndSetZnode(ZnodeValue expect, ZnodeOpArg arg, ZkClient zkClient, + ZNRecord diff) { String path = arg._znodePath; ZnodePropertyType type = arg._propertyType; String key = arg._key; @@ -712,29 +542,20 @@ public class TestExecutor // retry 3 times in case there are write conflicts long backoffTime = 20; // ms - for (int i = 0; i < 3; i++) - { - try - { + for (int i = 0; i < 3; i++) { + try { Stat stat = new Stat(); ZNRecord record = zkClient. 
readDataAndStat(path, stat, true); - if (isValueExpected(record, type, key, expect, diff)) - { - if (arg._operation.compareTo("+") == 0) - { - if (record == null) - { + if (isValueExpected(record, type, key, expect, diff)) { + if (arg._operation.compareTo("+") == 0) { + if (record == null) { record = new ZNRecord("default"); } ZnodeModValueType valueType = getValueType(arg._propertyType, arg._key); - switch (valueType) - { + switch (valueType) { case SINGLE_VALUE: - setSingleValue(record, - arg._propertyType, - arg._key, - arg._updateValue._singleValue); + setSingleValue(record, arg._propertyType, arg._key, arg._updateValue._singleValue); break; case LIST_VALUE: setListValue(record, arg._key, arg._updateValue._listValue); @@ -745,19 +566,17 @@ public class TestExecutor case ZNODE_VALUE: // deep copy record = - ZNRECORD_SERIALIZER.deserialize(ZNRECORD_SERIALIZER.serialize(arg._updateValue._znodeValue)); + ZNRECORD_SERIALIZER.deserialize(ZNRECORD_SERIALIZER + .serialize(arg._updateValue._znodeValue)); break; case INVALID: break; default: break; } - } - else if (arg._operation.compareTo("-") == 0) - { + } else if (arg._operation.compareTo("-") == 0) { ZnodeModValueType valueType = getValueType(arg._propertyType, arg._key); - switch (valueType) - { + switch (valueType) { case SINGLE_VALUE: removeSingleValue(record, arg._propertyType, arg._key); break; @@ -775,54 +594,36 @@ public class TestExecutor default: break; } - } - else - { + } else { logger.warn("fail to execute (unsupport operation): " + arg._operation); success = false; } - if (success == true) - { - if (record == null) - { + if (success == true) { + if (record == null) { zkClient.delete(path); - } - else - { - try - { + } else { + try { zkClient.createPersistent(path, true); - } - catch (ZkNodeExistsException e) - { + } catch (ZkNodeExistsException e) { // OK } zkClient.writeData(path, record, stat.getVersion()); } return true; - } - else - { + } else { return false; } } - } - catch (ZkBadVersionException e) - { + } catch (ZkBadVersionException e) { // e.printStackTrace(); - } - catch (PropertyStoreException e) - { + } catch (PropertyStoreException e) { // e.printStackTrace(); } - try - { + try { Thread.sleep(backoffTime); - } - catch (InterruptedException e) - { + } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } @@ -832,20 +633,15 @@ public class TestExecutor return false; } - private static class ExecuteCommand implements Runnable - { - private final TestCommand _command; - private final long _startTime; - private final ZkClient _zkClient; - private final CountDownLatch _countDown; + private static class ExecuteCommand implements Runnable { + private final TestCommand _command; + private final long _startTime; + private final ZkClient _zkClient; + private final CountDownLatch _countDown; private final Map _testResults; - public ExecuteCommand(long startTime, - TestCommand command, - CountDownLatch countDown, - ZkClient zkClient, - Map testResults) - { + public ExecuteCommand(long startTime, TestCommand command, CountDownLatch countDown, + ZkClient zkClient, Map testResults) { _startTime = startTime; _command = command; _countDown = countDown; @@ -854,61 +650,46 @@ public class TestExecutor } @Override - public void run() - { + public void run() { boolean result = false; long now = System.currentTimeMillis(); final long timeout = now + _command._trigger._timeout; ZNRecord diff = new ZNRecord("diff"); - try - { - if (now < _startTime) - { + try { + if (now < _startTime) { 
Thread.sleep(_startTime - now); } - do - { - if (_command._commandType == CommandType.MODIFY) - { + do { + if (_command._commandType == CommandType.MODIFY) { ZnodeOpArg arg = _command._znodeOpArg; final ZnodeValue expectValue = _command._trigger._expectValue; result = compareAndSetZnode(expectValue, arg, _zkClient, diff); // logger.error("result:" + result + ", " + _command); - if (result == true) - { + if (result == true) { _command._finishTimestamp = System.currentTimeMillis(); _testResults.put(_command, true); break; - } - else - { + } else { // logger.error("result:" + result + ", diff:" + diff); } - } - else if (_command._commandType == CommandType.VERIFY) - { + } else if (_command._commandType == CommandType.VERIFY) { ZnodeOpArg arg = _command._znodeOpArg; final String znodePath = arg._znodePath; ZNRecord record = _zkClient. readData(znodePath, true); result = executeVerifier(record, _command, diff); // logger.error("result:" + result + ", " + _command.toString()); - if (result == true) - { + if (result == true) { _command._finishTimestamp = System.currentTimeMillis(); _testResults.put(_command, true); break; - } - else - { + } else { // logger.error("result:" + result + ", diff:" + diff); } - } - else if (_command._commandType == CommandType.START) - { + } else if (_command._commandType == CommandType.START) { // TODO add data trigger for START command Thread thread = _command._nodeOpArg._thread; thread.start(); @@ -918,9 +699,7 @@ public class TestExecutor logger.info("result:" + result + ", " + _command.toString()); _testResults.put(_command, true); break; - } - else if (_command._commandType == CommandType.STOP) - { + } else if (_command._commandType == CommandType.STOP) { // TODO add data trigger for STOP command HelixManager manager = _command._nodeOpArg._manager; manager.disconnect(); @@ -934,9 +713,7 @@ public class TestExecutor logger.info("result:" + result + ", " + _command.toString()); _testResults.put(_command, true); break; - } - else - { + } else { throw new IllegalArgumentException("Unsupport command type (was " + _command._commandType + ")"); } @@ -944,24 +721,17 @@ public class TestExecutor Thread.sleep(SLEEP_TIME); now = System.currentTimeMillis(); - } - while (now <= timeout); - } - catch (Exception e) - { + } while (now <= timeout); + } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); - } - finally - { - if (result == false) - { + } finally { + if (result == false) { _command._finishTimestamp = System.currentTimeMillis(); logger.error("result:" + result + ", diff: " + diff); } _countDown.countDown(); - if (_countDown.getCount() == 0) - { + if (_countDown.getCount() == 0) { if (_zkClient != null && _zkClient.getConnection() != null) { @@ -973,55 +743,44 @@ public class TestExecutor } private static Map executeTestHelper(List commandList, - String zkAddr, - CountDownLatch countDown) - { + String zkAddr, CountDownLatch countDown) { - final Map testResults = - new ConcurrentHashMap(); + final Map testResults = new ConcurrentHashMap(); ZkClient zkClient = null; zkClient = new ZkClient(zkAddr, ZkClient.DEFAULT_CONNECTION_TIMEOUT); zkClient.setZkSerializer(new ZNRecordSerializer()); // sort on trigger's start time, stable sort - Collections.sort(commandList, new Comparator() - { + Collections.sort(commandList, new Comparator() { @Override - public int compare(TestCommand o1, TestCommand o2) - { + public int compare(TestCommand o1, TestCommand o2) { return (int) (o1._trigger._startTime - o2._trigger._startTime); } }); - for (TestCommand 
command : commandList) - { + for (TestCommand command : commandList) { testResults.put(command, new Boolean(false)); TestTrigger trigger = command._trigger; command._startTimestamp = System.currentTimeMillis() + trigger._startTime; - new Thread(new ExecuteCommand(command._startTimestamp, - command, - countDown, - zkClient, - testResults)).start(); + new Thread(new ExecuteCommand(command._startTimestamp, command, countDown, zkClient, + testResults)).start(); } return testResults; } - public static void executeTestAsync(List commandList, String zkAddr) throws InterruptedException - { + public static void executeTestAsync(List commandList, String zkAddr) + throws InterruptedException { CountDownLatch countDown = new CountDownLatch(commandList.size()); executeTestHelper(commandList, zkAddr, countDown); } - public static Map executeTest(List commandList, - String zkAddr) throws InterruptedException - { + public static Map executeTest(List commandList, String zkAddr) + throws InterruptedException { final CountDownLatch countDown = new CountDownLatch(commandList.size()); - Map testResults = - executeTestHelper(commandList, zkAddr, countDown); + Map testResults = executeTestHelper(commandList, zkAddr, countDown); // TODO add timeout countDown.await(); http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/TestTrigger.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/TestTrigger.java b/helix-core/src/main/java/org/apache/helix/tools/TestTrigger.java index b225a0a..7c5bdf4 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/TestTrigger.java +++ b/helix-core/src/main/java/org/apache/helix/tools/TestTrigger.java @@ -24,20 +24,16 @@ import java.util.Map; import org.apache.helix.ZNRecord; - - -public class TestTrigger -{ +public class TestTrigger { public long _startTime; public long _timeout; public ZnodeValue _expectValue; - + /** * no time or data trigger */ - public TestTrigger() - { - this(0, 0, (ZnodeValue)null); + public TestTrigger() { + this(0, 0, (ZnodeValue) null); } /** @@ -45,17 +41,15 @@ public class TestTrigger * @param startTime * @param timeout */ - public TestTrigger(long startTime) - { - this(startTime, 0, (ZnodeValue)null); + public TestTrigger(long startTime) { + this(startTime, 0, (ZnodeValue) null); } /** * simple field data trigger * @param expect */ - public TestTrigger(long startTime, long timeout, String expect) - { + public TestTrigger(long startTime, long timeout, String expect) { this(startTime, timeout, new ZnodeValue(expect)); } @@ -63,8 +57,7 @@ public class TestTrigger * list field data trigger * @param expect */ - public TestTrigger(long startTime, long timeout, List expect) - { + public TestTrigger(long startTime, long timeout, List expect) { this(startTime, timeout, new ZnodeValue(expect)); } @@ -72,61 +65,53 @@ public class TestTrigger * map field data trigger * @param expect */ - public TestTrigger(long startTime, long timeout, Map expect) - { + public TestTrigger(long startTime, long timeout, Map expect) { this(startTime, timeout, new ZnodeValue(expect)); } - + /** * znode data trigger * @param expect */ - public TestTrigger(long startTime, long timeout, ZNRecord expect) - { + public TestTrigger(long startTime, long timeout, ZNRecord expect) { this(startTime, timeout, new ZnodeValue(expect)); } - + /** - * * @param startTime * @param timeout * @param expect */ - public TestTrigger(long startTime, long 
timeout, ZnodeValue expect) - { + public TestTrigger(long startTime, long timeout, ZnodeValue expect) { _startTime = startTime; _timeout = timeout; _expectValue = expect; } - + @Override - public String toString() - { + public String toString() { String ret = "<" + _startTime + "~" + _timeout + "ms, " + _expectValue + ">"; return ret; } // TODO temp test; remove it /* - public static void main(String[] args) - { - TestTrigger trigger = new TestTrigger(0, 0, "simpleValue0"); - System.out.println("trigger=" + trigger); - - List list = new ArrayList(); - list.add("listValue1"); - list.add("listValue2"); - trigger = new TestTrigger(0, 0, list); - System.out.println("trigger=" + trigger); - - Map map = new HashMap(); - map.put("mapKey3", "mapValue3"); - map.put("mapKey4", "mapValue4"); - trigger = new TestTrigger(0, 0, map); - System.out.println("trigger=" + trigger); - - trigger = new TestTrigger(); - System.out.println("trigger=" + trigger); - } - */ + * public static void main(String[] args) + * { + * TestTrigger trigger = new TestTrigger(0, 0, "simpleValue0"); + * System.out.println("trigger=" + trigger); + * List list = new ArrayList(); + * list.add("listValue1"); + * list.add("listValue2"); + * trigger = new TestTrigger(0, 0, list); + * System.out.println("trigger=" + trigger); + * Map map = new HashMap(); + * map.put("mapKey3", "mapValue3"); + * map.put("mapKey4", "mapValue4"); + * trigger = new TestTrigger(0, 0, map); + * System.out.println("trigger=" + trigger); + * trigger = new TestTrigger(); + * System.out.println("trigger=" + trigger); + * } + */ } http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/YAISCalculator.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/YAISCalculator.java b/helix-core/src/main/java/org/apache/helix/tools/YAISCalculator.java index fdfba33..4292baa 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/YAISCalculator.java +++ b/helix-core/src/main/java/org/apache/helix/tools/YAISCalculator.java @@ -25,51 +25,40 @@ import java.util.LinkedList; import java.util.List; import java.util.Random; -public class YAISCalculator -{ - static class Assignment - { +public class YAISCalculator { + static class Assignment { private final int numNodes; private final int replication; Partition[] partitions; Node[] nodes; - public Assignment(int numNodes, int numPartitions, int replication) - { + public Assignment(int numNodes, int numPartitions, int replication) { this.numNodes = numNodes; this.replication = replication; partitions = new Partition[numPartitions]; - for (int i = 0; i < numPartitions; i++) - { + for (int i = 0; i < numPartitions; i++) { partitions[i] = new Partition(i, replication); } nodes = new Node[numNodes]; - for (int i = 0; i < numNodes; i++) - { + for (int i = 0; i < numNodes; i++) { nodes[i] = new Node(replication); } } - public void assign(int partitionId, int replicaId, int nodeId) - { - System.out.println("Assigning (" + partitionId + "," + replicaId - + ") to " + nodeId); + public void assign(int partitionId, int replicaId, int nodeId) { + System.out.println("Assigning (" + partitionId + "," + replicaId + ") to " + nodeId); partitions[partitionId].nodeIds[replicaId] = nodeId; nodes[nodeId].partitionLists.get(replicaId).push(partitionId); } - public void unassign(int partitionId, int replicaId) - { + public void unassign(int partitionId, int replicaId) { } - Integer[] getPartitionsPerNode(int 
nodeId, int replicaId) - { + Integer[] getPartitionsPerNode(int nodeId, int replicaId) { List partitionsList = new ArrayList(); - for (Partition p : partitions) - { - if (p.nodeIds[replicaId] == nodeId) - { + for (Partition p : partitions) { + if (p.nodeIds[replicaId] == nodeId) { partitionsList.add(p.partionId); } } @@ -78,18 +67,14 @@ public class YAISCalculator return array; } - public void printPerNode() - { - for (int nodeId = 0; nodeId < numNodes; nodeId++) - { - for (int r = 0; r < replication; r++) - { + public void printPerNode() { + for (int nodeId = 0; nodeId < numNodes; nodeId++) { + for (int r = 0; r < replication; r++) { StringBuilder sb = new StringBuilder(); sb.append("(").append(nodeId).append(",").append(r).append("):\t"); Node node = nodes[nodeId]; LinkedList linkedList = node.partitionLists.get(r); - for (int partitionId : linkedList) - { + for (int partitionId : linkedList) { sb.append(partitionId).append(","); } System.out.println(sb.toString()); @@ -99,13 +84,11 @@ public class YAISCalculator } } - static class Partition - { + static class Partition { final int partionId; - public Partition(int partionId, int replication) - { + public Partition(int partionId, int replication) { this.partionId = partionId; nodeIds = new int[replication]; Arrays.fill(nodeIds, -1); @@ -114,49 +97,41 @@ public class YAISCalculator int nodeIds[]; } - static class Node - { + static class Node { private final int replication; ArrayList> partitionLists; - public Node(int replication) - { + public Node(int replication) { this.replication = replication; partitionLists = new ArrayList>(replication); - for (int i = 0; i < replication; i++) - { + for (int i = 0; i < replication; i++) { partitionLists.add(new LinkedList()); } } } - public static void main(String[] args) - { - doAssignment(new int[] - { 5 }, 120, 3); + public static void main(String[] args) { + doAssignment(new int[] { + 5 + }, 120, 3); } - private static void doAssignment(int[] nodes, int partitions, int replication) - { + private static void doAssignment(int[] nodes, int partitions, int replication) { int N = nodes[0]; int totalNodes = 0; - for (int temp : nodes) - { + for (int temp : nodes) { totalNodes += temp; } Assignment assignment = new Assignment(totalNodes, partitions, replication); int nodeId = 0; - for (int i = 0; i < partitions; i++) - { + for (int i = 0; i < partitions; i++) { assignment.assign(i, 0, nodeId); nodeId = (nodeId + 1) % N; } Random random = new Random(); - for (int r = 1; r < replication; r++) - { - for (int id = 0; id < N; id++) - { + for (int r = 1; r < replication; r++) { + for (int id = 0; id < N; id++) { Integer[] partitionsPerNode = assignment.getPartitionsPerNode(id, 0); boolean[] used = new boolean[partitionsPerNode.length]; Arrays.fill(used, false); @@ -164,16 +139,12 @@ public class YAISCalculator nodeId = (id + r) % N; int count = partitionsPerNode.length; boolean done = false; - do - { - if (nodeId != id) - { + do { + if (nodeId != id) { int nextInt = random.nextInt(count); int temp = 0; - for (int b = 0; b < used.length; b++) - { - if (!used[b] && temp == nextInt) - { + for (int b = 0; b < used.length; b++) { + if (!used[b] && temp == nextInt) { assignment.assign(partitionsPerNode[b], r, nodeId); used[b] = true; break; @@ -185,16 +156,13 @@ public class YAISCalculator } } - if (nodes.length > 1) - { + if (nodes.length > 1) { int prevNodeCount = nodes[0]; - for (int i = 1; i < nodes.length; i++) - { + for (int i = 1; i < nodes.length; i++) { int newNodeCount = prevNodeCount + nodes[i]; - int 
masterPartitionsToMove = (int) ((partitions * 1.0 / prevNodeCount - partitions - * 1.0 / newNodeCount) * 1 * prevNodeCount); - while (masterPartitionsToMove > 0) - { + int masterPartitionsToMove = + (int) ((partitions * 1.0 / prevNodeCount - partitions * 1.0 / newNodeCount) * 1 * prevNodeCount); + while (masterPartitionsToMove > 0) { } http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/ZKDumper.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/ZKDumper.java b/helix-core/src/main/java/org/apache/helix/tools/ZKDumper.java index 3d6c52f..729ccad 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/ZKDumper.java +++ b/helix-core/src/main/java/org/apache/helix/tools/ZKDumper.java @@ -42,8 +42,6 @@ import org.apache.helix.manager.zk.ZkClient; /** * Dumps the Zookeeper file structure on to Disk - * - * */ @SuppressWarnings("static-access") public class ZKDumper { @@ -74,24 +72,28 @@ public class ZKDumper { options = new Options(); OptionGroup optionGroup = new OptionGroup(); - Option d = OptionBuilder.withLongOpt("download") - .withDescription("Download from ZK to File System").create(); + Option d = + OptionBuilder.withLongOpt("download").withDescription("Download from ZK to File System") + .create(); d.setArgs(0); - Option dSuffix = OptionBuilder.withLongOpt("addSuffix") - .withDescription("add suffix to every file downloaded from ZK").create(); + Option dSuffix = + OptionBuilder.withLongOpt("addSuffix") + .withDescription("add suffix to every file downloaded from ZK").create(); dSuffix.setArgs(1); dSuffix.setRequired(false); - Option u = OptionBuilder.withLongOpt("upload").withDescription("Upload from File System to ZK") - .create(); + Option u = + OptionBuilder.withLongOpt("upload").withDescription("Upload from File System to ZK") + .create(); u.setArgs(0); - Option uSuffix = OptionBuilder.withLongOpt("removeSuffix") - .withDescription("remove suffix from every file uploaded to ZK").create(); + Option uSuffix = + OptionBuilder.withLongOpt("removeSuffix") + .withDescription("remove suffix from every file uploaded to ZK").create(); uSuffix.setArgs(0); uSuffix.setRequired(false); - Option del = OptionBuilder.withLongOpt("delete").withDescription("Delete given path from ZK") - .create(); + Option del = + OptionBuilder.withLongOpt("delete").withDescription("Delete given path from ZK").create(); optionGroup.setRequired(true); optionGroup.addOption(del); http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/tools/ZKLogFormatter.java ---------------------------------------------------------------------- diff --git a/helix-core/src/main/java/org/apache/helix/tools/ZKLogFormatter.java b/helix-core/src/main/java/org/apache/helix/tools/ZKLogFormatter.java index cf09ce4..da63b9e 100644 --- a/helix-core/src/main/java/org/apache/helix/tools/ZKLogFormatter.java +++ b/helix-core/src/main/java/org/apache/helix/tools/ZKLogFormatter.java @@ -54,68 +54,58 @@ import org.apache.zookeeper.server.persistence.FileTxnLog; import org.apache.zookeeper.server.util.SerializeUtils; import org.apache.zookeeper.txn.TxnHeader; -public class ZKLogFormatter -{ +public class ZKLogFormatter { private static final Logger LOG = Logger.getLogger(ZKLogFormatter.class); - private static DateFormat dateTimeInstance = DateFormat.getDateTimeInstance( - DateFormat.SHORT, DateFormat.LONG); + private static DateFormat 
dateTimeInstance = DateFormat.getDateTimeInstance(DateFormat.SHORT, + DateFormat.LONG); private static HexBinaryAdapter adapter = new HexBinaryAdapter(); private static String fieldDelim = ":"; private static String fieldSep = " "; static BufferedWriter bw = null; + /** * @param args */ - public static void main(String[] args) throws Exception - { - if (args.length != 2 && args.length != 3) - { + public static void main(String[] args) throws Exception { + if (args.length != 2 && args.length != 3) { System.err.println("USAGE: LogFormatter log_file"); System.exit(2); } - - if (args.length == 3) - { + + if (args.length == 3) { bw = new BufferedWriter(new FileWriter(new File(args[2]))); } - - if (args[0].equals("log")) - { + + if (args[0].equals("log")) { readTransactionLog(args[1]); - } else if (args[0].equals("snapshot")) - { + } else if (args[0].equals("snapshot")) { readSnapshotLog(args[1]); } - - if (bw != null) - { + + if (bw != null) { bw.close(); } } - private static void readSnapshotLog(String snapshotPath) throws Exception - { + private static void readSnapshotLog(String snapshotPath) throws Exception { FileInputStream fis = new FileInputStream(snapshotPath); BinaryInputArchive ia = BinaryInputArchive.getArchive(fis); Map sessions = new HashMap(); DataTree dt = new DataTree(); FileHeader header = new FileHeader(); header.deserialize(ia, "fileheader"); - if (header.getMagic() != FileSnap.SNAP_MAGIC) - { - throw new IOException("mismatching magic headers " + header.getMagic() - + " != " + FileSnap.SNAP_MAGIC); + if (header.getMagic() != FileSnap.SNAP_MAGIC) { + throw new IOException("mismatching magic headers " + header.getMagic() + " != " + + FileSnap.SNAP_MAGIC); } SerializeUtils.deserializeSnapshot(dt, ia, sessions); - if (bw != null) - { + if (bw != null) { bw.write(sessions.toString()); bw.newLine(); - } else - { - System.out.println(sessions); + } else { + System.out.println(sessions); } traverse(dt, 1, "/"); @@ -124,37 +114,29 @@ public class ZKLogFormatter /* * Level order traversal */ - private static void traverse(DataTree dt, int startId, String startPath) throws Exception - { + private static void traverse(DataTree dt, int startId, String startPath) throws Exception { LinkedList queue = new LinkedList(); queue.add(new Pair(startPath, startId)); - while (!queue.isEmpty()) - { + while (!queue.isEmpty()) { Pair pair = queue.removeFirst(); String path = pair._path; DataNode head = dt.getNode(path); Stat stat = new Stat(); byte[] data = null; - try - { + try { data = dt.getData(path, stat, null); - } catch (NoNodeException e) - { + } catch (NoNodeException e) { e.printStackTrace(); } // print the node format(startId, pair, head, data); Set children = head.getChildren(); - if (children != null) - { - for (String child : children) - { + if (children != null) { + for (String child : children) { String childPath; - if (path.endsWith("/")) - { + if (path.endsWith("/")) { childPath = path + child; - } else - { + } else { childPath = path + "/" + child; } queue.add(new Pair(childPath, startId)); @@ -165,114 +147,99 @@ public class ZKLogFormatter } - static class Pair - { + static class Pair { private final String _path; private final int _parentId; - public Pair(String path, int parentId) - { + public Pair(String path, int parentId) { _path = path; _parentId = parentId; } } - private static void format(int id, Pair pair, DataNode head, byte[] data) throws Exception - { + private static void format(int id, Pair pair, DataNode head, byte[] data) throws Exception { String dataStr = ""; - 
if (data != null) - { + if (data != null) { dataStr = new String(data).replaceAll("[\\s]+", ""); } StringBuffer sb = new StringBuffer(); - //@formatter:off + // @formatter:off sb.append("id").append(fieldDelim).append(id).append(fieldSep); sb.append("parent").append(fieldDelim).append(pair._parentId).append(fieldSep); sb.append("path").append(fieldDelim).append(pair._path).append(fieldSep); - sb.append("session").append(fieldDelim).append("0x" +Long.toHexString(head.stat.getEphemeralOwner())).append(fieldSep); - sb.append("czxid").append(fieldDelim).append("0x" +Long.toHexString(head.stat.getCzxid())).append(fieldSep); + sb.append("session").append(fieldDelim) + .append("0x" + Long.toHexString(head.stat.getEphemeralOwner())).append(fieldSep); + sb.append("czxid").append(fieldDelim).append("0x" + Long.toHexString(head.stat.getCzxid())) + .append(fieldSep); sb.append("ctime").append(fieldDelim).append(head.stat.getCtime()).append(fieldSep); sb.append("mtime").append(fieldDelim).append(head.stat.getMtime()).append(fieldSep); - sb.append("cmzxid").append(fieldDelim).append("0x" +Long.toHexString(head.stat.getMzxid())).append(fieldSep); - sb.append("pzxid").append(fieldDelim).append("0x" +Long.toHexString(head.stat.getPzxid())).append(fieldSep); + sb.append("cmzxid").append(fieldDelim).append("0x" + Long.toHexString(head.stat.getMzxid())) + .append(fieldSep); + sb.append("pzxid").append(fieldDelim).append("0x" + Long.toHexString(head.stat.getPzxid())) + .append(fieldSep); sb.append("aversion").append(fieldDelim).append(head.stat.getAversion()).append(fieldSep); sb.append("cversion").append(fieldDelim).append(head.stat.getCversion()).append(fieldSep); sb.append("version").append(fieldDelim).append(head.stat.getVersion()).append(fieldSep); sb.append("data").append(fieldDelim).append(dataStr).append(fieldSep); - //@formatter:on + // @formatter:on - if (bw != null) - { + if (bw != null) { bw.write(sb.toString()); bw.newLine(); - } else - { - System.out.println(sb); + } else { + System.out.println(sb); } } - private static void readTransactionLog(String logfilepath) - throws FileNotFoundException, IOException, EOFException - { + private static void readTransactionLog(String logfilepath) throws FileNotFoundException, + IOException, EOFException { FileInputStream fis = new FileInputStream(logfilepath); BinaryInputArchive logStream = BinaryInputArchive.getArchive(fis); FileHeader fhdr = new FileHeader(); fhdr.deserialize(logStream, "fileheader"); - if (fhdr.getMagic() != FileTxnLog.TXNLOG_MAGIC) - { + if (fhdr.getMagic() != FileTxnLog.TXNLOG_MAGIC) { System.err.println("Invalid magic number for " + logfilepath); System.exit(2); } - if (bw != null) - { - bw.write("ZooKeeper Transactional Log File with dbid " - + fhdr.getDbid() + " txnlog format version " + fhdr.getVersion()); + if (bw != null) { + bw.write("ZooKeeper Transactional Log File with dbid " + fhdr.getDbid() + + " txnlog format version " + fhdr.getVersion()); bw.newLine(); - } else - { - System.out.println("ZooKeeper Transactional Log File with dbid " - + fhdr.getDbid() + " txnlog format version " + fhdr.getVersion()); + } else { + System.out.println("ZooKeeper Transactional Log File with dbid " + fhdr.getDbid() + + " txnlog format version " + fhdr.getVersion()); } - int count = 0; - while (true) - { + while (true) { long crcValue; byte[] bytes; - try - { + try { crcValue = logStream.readLong("crcvalue"); bytes = logStream.readBuffer("txnEntry"); - } catch (EOFException e) - { - if (bw != null) - { + } catch (EOFException e) { + if (bw != 
null) { bw.write("EOF reached after " + count + " txns."); bw.newLine(); - } else - { + } else { System.out.println("EOF reached after " + count + " txns."); } break; } - if (bytes.length == 0) - { + if (bytes.length == 0) { // Since we preallocate, we define EOF to be an // empty transaction - if (bw != null) - { + if (bw != null) { bw.write("EOF reached after " + count + " txns."); bw.newLine(); - } else - { + } else { System.out.println("EOF reached after " + count + " txns."); } @@ -280,26 +247,20 @@ public class ZKLogFormatter } Checksum crc = new Adler32(); crc.update(bytes, 0, bytes.length); - if (crcValue != crc.getValue()) - { - throw new IOException("CRC doesn't match " + crcValue + " vs " - + crc.getValue()); + if (crcValue != crc.getValue()) { + throw new IOException("CRC doesn't match " + crcValue + " vs " + crc.getValue()); } - InputArchive iab = BinaryInputArchive - .getArchive(new ByteArrayInputStream(bytes)); + InputArchive iab = BinaryInputArchive.getArchive(new ByteArrayInputStream(bytes)); TxnHeader hdr = new TxnHeader(); Record txn = SerializeUtils.deserializeTxn(iab, hdr); - if (bw != null) - { + if (bw != null) { bw.write(formatTransaction(hdr, txn)); bw.newLine(); - } else - { + } else { System.out.println(formatTransaction(hdr, txn)); } - if (logStream.readByte("EOR") != 'B') - { + if (logStream.readByte("EOR") != 'B') { LOG.error("Last transaction was partial."); throw new EOFException("Last transaction was partial."); } @@ -307,10 +268,8 @@ public class ZKLogFormatter } } - static String op2String(int op) - { - switch (op) - { + static String op2String(int op) { + switch (op) { case OpCode.notification: return "notification"; case OpCode.create: @@ -344,8 +303,7 @@ public class ZKLogFormatter } } - private static String formatTransaction(TxnHeader header, Record txn) - { + private static String formatTransaction(TxnHeader header, Record txn) { StringBuilder sb = new StringBuilder(); sb.append("time").append(fieldDelim).append(header.getTime()); @@ -355,41 +313,27 @@ public class ZKLogFormatter .append(Long.toHexString(header.getCxid())); sb.append(fieldSep).append("zxid").append(fieldDelim).append("0x") .append(Long.toHexString(header.getZxid())); - sb.append(fieldSep).append("type").append(fieldDelim) - .append(op2String(header.getType())); - if (txn != null) - { - try - { + sb.append(fieldSep).append("type").append(fieldDelim).append(op2String(header.getType())); + if (txn != null) { + try { byte[] data = null; for (PropertyDescriptor pd : Introspector.getBeanInfo(txn.getClass()) - .getPropertyDescriptors()) - { - if (pd.getName().equalsIgnoreCase("data")) - { + .getPropertyDescriptors()) { + if (pd.getName().equalsIgnoreCase("data")) { data = (byte[]) pd.getReadMethod().invoke(txn); continue; } - if (pd.getReadMethod() != null && !"class".equals(pd.getName())) - { - sb.append(fieldSep) - .append(pd.getDisplayName()) - .append(fieldDelim) - .append( - pd.getReadMethod().invoke(txn).toString() - .replaceAll("[\\s]+", "")); + if (pd.getReadMethod() != null && !"class".equals(pd.getName())) { + sb.append(fieldSep).append(pd.getDisplayName()).append(fieldDelim) + .append(pd.getReadMethod().invoke(txn).toString().replaceAll("[\\s]+", "")); } } - if (data != null) - { + if (data != null) { sb.append(fieldSep).append("data").append(fieldDelim) .append(new String(data).replaceAll("[\\s]+", "")); } - } catch (Exception e) - { - LOG.error( - "Error while retrieving bean property values for " + txn.getClass(), - e); + } catch (Exception e) { + LOG.error("Error while 
retrieving bean property values for " + txn.getClass(), e); } }
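The YAISCalculator hunk above is a pure brace/wrapping reformat; the assignment logic is unchanged: replica 0 of each partition is placed round-robin across the first N nodes, and the remaining replicas are then placed at random on other nodes. A minimal, self-contained sketch of that round-robin step (the class and method names below are illustrative, not part of the patch):

public final class RoundRobinSketch {
  // Replica 0 of partition i goes to node (i % numNodes), matching the first
  // assignment loop in YAISCalculator.doAssignment.
  static int[] assignPrimaries(int numPartitions, int numNodes) {
    int[] owner = new int[numPartitions];
    int nodeId = 0;
    for (int i = 0; i < numPartitions; i++) {
      owner[i] = nodeId;
      nodeId = (nodeId + 1) % numNodes;
    }
    return owner;
  }

  public static void main(String[] args) {
    // The patch's own example parameters: 120 partitions over 5 nodes.
    int[] owner = assignPrimaries(120, 5);
    System.out.println("partition 0 -> node " + owner[0] + ", partition 7 -> node " + owner[7]);
  }
}

Note that the node-expansion loop in doAssignment (while (masterPartitionsToMove > 0)) remains an empty stub; only its wrapping changed in this hunk.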
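The ZKDumper hunk likewise only rewraps the Commons CLI OptionBuilder chains into the new wrapped-assignment style; the download/upload/addSuffix/removeSuffix/delete options themselves are untouched. As a compact reference for that builder pattern, a sketch that builds and parses a similar mutually exclusive option group (the class name and the parsing code are illustrative; only the option names come from ZKDumper):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;

@SuppressWarnings("static-access")
public class ZkDumperCliSketch {
  public static void main(String[] args) throws Exception {
    Option download =
        OptionBuilder.withLongOpt("download").withDescription("Download from ZK to File System")
            .create();
    Option upload =
        OptionBuilder.withLongOpt("upload").withDescription("Upload from File System to ZK")
            .create();

    // Exactly one of --download / --upload must be given, as in ZKDumper.
    OptionGroup group = new OptionGroup();
    group.setRequired(true);
    group.addOption(download);
    group.addOption(upload);

    Options options = new Options();
    options.addOptionGroup(group);

    CommandLine cmd = new GnuParser().parse(options, args); // e.g. run with --download
    System.out.println(cmd.hasOption("download") ? "download mode" : "upload mode");
  }
}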
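For ZKLogFormatter the reformat leaves the control flow intact: args[0] selects "log" or "snapshot", args[1] is the file to read, and an optional third argument redirects output to a file. Each serialized transaction entry is checksum-verified with Adler32 before it is deserialized; a minimal sketch of that step, assuming illustrative class and method names (the check itself mirrors readTransactionLog):

import java.io.IOException;
import java.util.zip.Adler32;
import java.util.zip.Checksum;

public class TxnCrcCheckSketch {
  // Throws if the checksum recorded in the log does not match the entry bytes,
  // using the same message format as ZKLogFormatter.readTransactionLog.
  static void verify(long recordedCrc, byte[] txnEntry) throws IOException {
    Checksum crc = new Adler32();
    crc.update(txnEntry, 0, txnEntry.length);
    if (recordedCrc != crc.getValue()) {
      throw new IOException("CRC doesn't match " + recordedCrc + " vs " + crc.getValue());
    }
  }

  public static void main(String[] args) throws IOException {
    byte[] entry = "example".getBytes();
    Checksum crc = new Adler32();
    crc.update(entry, 0, entry.length);
    verify(crc.getValue(), entry); // passes; a corrupted entry would throw
    System.out.println("checksum OK");
  }
}

A corrupted or truncated entry surfaces either as this CRC mismatch or as a missing 'B' end-of-record byte, both of which the formatter reports before stopping.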