Return-Path: X-Original-To: apmail-hbase-commits-archive@www.apache.org Delivered-To: apmail-hbase-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 79A7D18F64 for ; Mon, 13 Jul 2015 17:30:42 +0000 (UTC) Received: (qmail 3772 invoked by uid 500); 13 Jul 2015 17:26:43 -0000 Delivered-To: apmail-hbase-commits-archive@hbase.apache.org Received: (qmail 97637 invoked by uid 500); 13 Jul 2015 17:26:39 -0000 Mailing-List: contact commits-help@hbase.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@hbase.apache.org Delivered-To: mailing list commits@hbase.apache.org Received: (qmail 91570 invoked by uid 99); 13 Jul 2015 17:04:11 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Mon, 13 Jul 2015 17:04:11 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 0C062E0523; Mon, 13 Jul 2015 17:04:11 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: busbey@apache.org To: commits@hbase.apache.org Date: Mon, 13 Jul 2015 17:04:12 -0000 Message-Id: In-Reply-To: <461378c9ac674167be7c92639f6aeae2@git.apache.org> References: <461378c9ac674167be7c92639f6aeae2@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [2/8] hbase git commit: Revert "Procedure v2 - Use nonces for double submits from client (Stephen Yuan Jiang)" http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java index d9d6ccb..3c7dcdb 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java @@ -382,34 +382,6 @@ public final class ProcedureProtos { * */ com.google.protobuf.ByteString getStateData(); - - // optional uint64 nonce_group = 13 [default = 0]; - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-     * Nonce to prevent same procedure submit by multiple times
-     * </pre>
- */ - boolean hasNonceGroup(); - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-     * Nonce to prevent same procedure submit by multiple times
-     * </pre>
- */ - long getNonceGroup(); - - // optional uint64 nonce = 14 [default = 0]; - /** - * optional uint64 nonce = 14 [default = 0]; - */ - boolean hasNonce(); - /** - * optional uint64 nonce = 14 [default = 0]; - */ - long getNonce(); } /** * Protobuf type {@code Procedure} @@ -557,16 +529,6 @@ public final class ProcedureProtos { stateData_ = input.readBytes(); break; } - case 104: { - bitField0_ |= 0x00000800; - nonceGroup_ = input.readUInt64(); - break; - } - case 112: { - bitField0_ |= 0x00001000; - nonce_ = input.readUInt64(); - break; - } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -937,46 +899,6 @@ public final class ProcedureProtos { return stateData_; } - // optional uint64 nonce_group = 13 [default = 0]; - public static final int NONCE_GROUP_FIELD_NUMBER = 13; - private long nonceGroup_; - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-     * Nonce to prevent same procedure submit by multiple times
-     * </pre>
- */ - public boolean hasNonceGroup() { - return ((bitField0_ & 0x00000800) == 0x00000800); - } - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-     * Nonce to prevent same procedure submit by multiple times
-     * </pre>
- */ - public long getNonceGroup() { - return nonceGroup_; - } - - // optional uint64 nonce = 14 [default = 0]; - public static final int NONCE_FIELD_NUMBER = 14; - private long nonce_; - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public boolean hasNonce() { - return ((bitField0_ & 0x00001000) == 0x00001000); - } - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public long getNonce() { - return nonce_; - } - private void initFields() { className_ = ""; parentId_ = 0L; @@ -990,8 +912,6 @@ public final class ProcedureProtos { exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); result_ = com.google.protobuf.ByteString.EMPTY; stateData_ = com.google.protobuf.ByteString.EMPTY; - nonceGroup_ = 0L; - nonce_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -1061,12 +981,6 @@ public final class ProcedureProtos { if (((bitField0_ & 0x00000400) == 0x00000400)) { output.writeBytes(12, stateData_); } - if (((bitField0_ & 0x00000800) == 0x00000800)) { - output.writeUInt64(13, nonceGroup_); - } - if (((bitField0_ & 0x00001000) == 0x00001000)) { - output.writeUInt64(14, nonce_); - } getUnknownFields().writeTo(output); } @@ -1129,14 +1043,6 @@ public final class ProcedureProtos { size += com.google.protobuf.CodedOutputStream .computeBytesSize(12, stateData_); } - if (((bitField0_ & 0x00000800) == 0x00000800)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(13, nonceGroup_); - } - if (((bitField0_ & 0x00001000) == 0x00001000)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(14, nonce_); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1217,16 +1123,6 @@ public final class ProcedureProtos { result = result && getStateData() .equals(other.getStateData()); } - result = result && (hasNonceGroup() == other.hasNonceGroup()); - if (hasNonceGroup()) { - result = result && (getNonceGroup() - == other.getNonceGroup()); - } - result = result && (hasNonce() == other.hasNonce()); - if (hasNonce()) { - result = result && (getNonce() - == other.getNonce()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1288,14 +1184,6 @@ public final class ProcedureProtos { hash = (37 * hash) + STATE_DATA_FIELD_NUMBER; hash = (53 * hash) + getStateData().hashCode(); } - if (hasNonceGroup()) { - hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonceGroup()); - } - if (hasNonce()) { - hash = (37 * hash) + NONCE_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getNonce()); - } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -1439,10 +1327,6 @@ public final class ProcedureProtos { bitField0_ = (bitField0_ & ~0x00000400); stateData_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000800); - nonceGroup_ = 0L; - bitField0_ = (bitField0_ & ~0x00001000); - nonce_ = 0L; - bitField0_ = (bitField0_ & ~0x00002000); return this; } @@ -1524,14 +1408,6 @@ public final class ProcedureProtos { to_bitField0_ |= 0x00000400; } result.stateData_ = stateData_; - if (((from_bitField0_ & 0x00001000) == 0x00001000)) { - to_bitField0_ |= 0x00000800; - } - result.nonceGroup_ = nonceGroup_; - if (((from_bitField0_ & 0x00002000) == 0x00002000)) { - to_bitField0_ |= 0x00001000; - } - result.nonce_ = nonce_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1595,12 +1471,6 @@ 
public final class ProcedureProtos { if (other.hasStateData()) { setStateData(other.getStateData()); } - if (other.hasNonceGroup()) { - setNonceGroup(other.getNonceGroup()); - } - if (other.hasNonce()) { - setNonce(other.getNonce()); - } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -2404,88 +2274,6 @@ public final class ProcedureProtos { return this; } - // optional uint64 nonce_group = 13 [default = 0]; - private long nonceGroup_ ; - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-       * Nonce to prevent same procedure submit by multiple times
-       * </pre>
- */ - public boolean hasNonceGroup() { - return ((bitField0_ & 0x00001000) == 0x00001000); - } - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-       * Nonce to prevent same procedure submit by multiple times
-       * </pre>
- */ - public long getNonceGroup() { - return nonceGroup_; - } - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-       * Nonce to prevent same procedure submit by multiple times
-       * </pre>
- */ - public Builder setNonceGroup(long value) { - bitField0_ |= 0x00001000; - nonceGroup_ = value; - onChanged(); - return this; - } - /** - * optional uint64 nonce_group = 13 [default = 0]; - * - *
-       * Nonce to prevent same procedure submit by multiple times
-       * </pre>
- */ - public Builder clearNonceGroup() { - bitField0_ = (bitField0_ & ~0x00001000); - nonceGroup_ = 0L; - onChanged(); - return this; - } - - // optional uint64 nonce = 14 [default = 0]; - private long nonce_ ; - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public boolean hasNonce() { - return ((bitField0_ & 0x00002000) == 0x00002000); - } - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public long getNonce() { - return nonce_; - } - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public Builder setNonce(long value) { - bitField0_ |= 0x00002000; - nonce_ = value; - onChanged(); - return this; - } - /** - * optional uint64 nonce = 14 [default = 0]; - */ - public Builder clearNonce() { - bitField0_ = (bitField0_ & ~0x00002000); - nonce_ = 0L; - onChanged(); - return this; - } - // @@protoc_insertion_point(builder_scope:Procedure) } @@ -7336,34 +7124,33 @@ public final class ProcedureProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\017Procedure.proto\032\023ErrorHandling.proto\"\271" + + "\n\017Procedure.proto\032\023ErrorHandling.proto\"\217" + "\002\n\tProcedure\022\022\n\nclass_name\030\001 \002(\t\022\021\n\tpare" + "nt_id\030\002 \001(\004\022\017\n\007proc_id\030\003 \002(\004\022\022\n\nstart_ti" + "me\030\004 \002(\004\022\r\n\005owner\030\005 \001(\t\022\036\n\005state\030\006 \002(\0162\017" + ".ProcedureState\022\020\n\010stack_id\030\007 \003(\r\022\023\n\013las" + "t_update\030\010 \002(\004\022\017\n\007timeout\030\t \001(\r\022+\n\texcep" + "tion\030\n \001(\0132\030.ForeignExceptionMessage\022\016\n\006" + - "result\030\013 \001(\014\022\022\n\nstate_data\030\014 \001(\014\022\026\n\013nonc" + - "e_group\030\r \001(\004:\0010\022\020\n\005nonce\030\016 \001(\004:\0010\"+\n\027Se" + - "quentialProcedureData\022\020\n\010executed\030\001 \002(\010\"", - "*\n\031StateMachineProcedureData\022\r\n\005state\030\001 " + - "\003(\r\"X\n\022ProcedureWALHeader\022\017\n\007version\030\001 \002" + - "(\r\022\014\n\004type\030\002 \002(\r\022\016\n\006log_id\030\003 \002(\004\022\023\n\013min_" + - "proc_id\030\004 \002(\004\";\n\023ProcedureWALTrailer\022\017\n\007" + - "version\030\001 \002(\r\022\023\n\013tracker_pos\030\002 \002(\004\"\214\001\n\025P" + - "rocedureStoreTracker\0220\n\004node\030\001 \003(\0132\".Pro" + - "cedureStoreTracker.TrackerNode\032A\n\013Tracke" + - "rNode\022\020\n\010start_id\030\001 \002(\004\022\017\n\007updated\030\002 \003(\004" + - "\022\017\n\007deleted\030\003 \003(\004\"\266\001\n\021ProcedureWALEntry\022" + - "%\n\004type\030\001 \002(\0162\027.ProcedureWALEntry.Type\022\035", - "\n\tprocedure\030\002 \003(\0132\n.Procedure\022\017\n\007proc_id" + - "\030\003 \001(\004\"J\n\004Type\022\007\n\003EOF\020\001\022\010\n\004INIT\020\002\022\n\n\006INS" + - "ERT\020\003\022\n\n\006UPDATE\020\004\022\n\n\006DELETE\020\005\022\013\n\007COMPACT" + - "\020\006*p\n\016ProcedureState\022\020\n\014INITIALIZING\020\001\022\014" + - "\n\010RUNNABLE\020\002\022\013\n\007WAITING\020\003\022\023\n\017WAITING_TIM" + - "EOUT\020\004\022\016\n\nROLLEDBACK\020\005\022\014\n\010FINISHED\020\006BE\n*" + - "org.apache.hadoop.hbase.protobuf.generat" + - "edB\017ProcedureProtosH\001\210\001\001\240\001\001" + "result\030\013 \001(\014\022\022\n\nstate_data\030\014 \001(\014\"+\n\027Sequ" + + "entialProcedureData\022\020\n\010executed\030\001 \002(\010\"*\n" + + "\031StateMachineProcedureData\022\r\n\005state\030\001 \003(", + "\r\"X\n\022ProcedureWALHeader\022\017\n\007version\030\001 \002(\r" + + 
"\022\014\n\004type\030\002 \002(\r\022\016\n\006log_id\030\003 \002(\004\022\023\n\013min_pr" + + "oc_id\030\004 \002(\004\";\n\023ProcedureWALTrailer\022\017\n\007ve" + + "rsion\030\001 \002(\r\022\023\n\013tracker_pos\030\002 \002(\004\"\214\001\n\025Pro" + + "cedureStoreTracker\0220\n\004node\030\001 \003(\0132\".Proce" + + "dureStoreTracker.TrackerNode\032A\n\013TrackerN" + + "ode\022\020\n\010start_id\030\001 \002(\004\022\017\n\007updated\030\002 \003(\004\022\017" + + "\n\007deleted\030\003 \003(\004\"\266\001\n\021ProcedureWALEntry\022%\n" + + "\004type\030\001 \002(\0162\027.ProcedureWALEntry.Type\022\035\n\t" + + "procedure\030\002 \003(\0132\n.Procedure\022\017\n\007proc_id\030\003", + " \001(\004\"J\n\004Type\022\007\n\003EOF\020\001\022\010\n\004INIT\020\002\022\n\n\006INSER" + + "T\020\003\022\n\n\006UPDATE\020\004\022\n\n\006DELETE\020\005\022\013\n\007COMPACT\020\006" + + "*p\n\016ProcedureState\022\020\n\014INITIALIZING\020\001\022\014\n\010" + + "RUNNABLE\020\002\022\013\n\007WAITING\020\003\022\023\n\017WAITING_TIMEO" + + "UT\020\004\022\016\n\nROLLEDBACK\020\005\022\014\n\010FINISHED\020\006BE\n*or" + + "g.apache.hadoop.hbase.protobuf.generated" + + "B\017ProcedureProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -7375,7 +7162,7 @@ public final class ProcedureProtos { internal_static_Procedure_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Procedure_descriptor, - new java.lang.String[] { "ClassName", "ParentId", "ProcId", "StartTime", "Owner", "State", "StackId", "LastUpdate", "Timeout", "Exception", "Result", "StateData", "NonceGroup", "Nonce", }); + new java.lang.String[] { "ClassName", "ParentId", "ProcId", "StartTime", "Owner", "State", "StackId", "LastUpdate", "Timeout", "Exception", "Result", "StateData", }); internal_static_SequentialProcedureData_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_SequentialProcedureData_fieldAccessorTable = new http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-protocol/src/main/protobuf/Master.proto ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto index 469c0a4..0b2e875 100644 --- a/hbase-protocol/src/main/protobuf/Master.proto +++ b/hbase-protocol/src/main/protobuf/Master.proto @@ -36,8 +36,6 @@ import "Quota.proto"; message AddColumnRequest { required TableName table_name = 1; required ColumnFamilySchema column_families = 2; - optional uint64 nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message AddColumnResponse { @@ -46,8 +44,6 @@ message AddColumnResponse { message DeleteColumnRequest { required TableName table_name = 1; required bytes column_name = 2; - optional uint64 nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message DeleteColumnResponse { @@ -56,8 +52,6 @@ message DeleteColumnResponse { message ModifyColumnRequest { required TableName table_name = 1; required ColumnFamilySchema column_families = 2; - optional uint64 nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message ModifyColumnResponse { @@ -112,8 +106,6 @@ message OfflineRegionResponse { message CreateTableRequest { required TableSchema table_schema = 1; repeated bytes split_keys = 2; - optional uint64 
nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message CreateTableResponse { @@ -122,8 +114,6 @@ message CreateTableResponse { message DeleteTableRequest { required TableName table_name = 1; - optional uint64 nonce_group = 2 [default = 0]; - optional uint64 nonce = 3 [default = 0]; } message DeleteTableResponse { @@ -133,8 +123,6 @@ message DeleteTableResponse { message TruncateTableRequest { required TableName tableName = 1; optional bool preserveSplits = 2 [default = false]; - optional uint64 nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message TruncateTableResponse { @@ -142,8 +130,6 @@ message TruncateTableResponse { message EnableTableRequest { required TableName table_name = 1; - optional uint64 nonce_group = 2 [default = 0]; - optional uint64 nonce = 3 [default = 0]; } message EnableTableResponse { @@ -152,8 +138,6 @@ message EnableTableResponse { message DisableTableRequest { required TableName table_name = 1; - optional uint64 nonce_group = 2 [default = 0]; - optional uint64 nonce = 3 [default = 0]; } message DisableTableResponse { @@ -163,8 +147,6 @@ message DisableTableResponse { message ModifyTableRequest { required TableName table_name = 1; required TableSchema table_schema = 2; - optional uint64 nonce_group = 3 [default = 0]; - optional uint64 nonce = 4 [default = 0]; } message ModifyTableResponse { http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-protocol/src/main/protobuf/Procedure.proto ---------------------------------------------------------------------- diff --git a/hbase-protocol/src/main/protobuf/Procedure.proto b/hbase-protocol/src/main/protobuf/Procedure.proto index 46ff967..232c290 100644 --- a/hbase-protocol/src/main/protobuf/Procedure.proto +++ b/hbase-protocol/src/main/protobuf/Procedure.proto @@ -54,10 +54,6 @@ message Procedure { optional ForeignExceptionMessage exception = 10; optional bytes result = 11; // opaque (user) result structure optional bytes state_data = 12; // opaque (user) procedure internal-state - - // Nonce to prevent same procedure submit by multiple times - optional uint64 nonce_group = 13 [default = 0]; - optional uint64 nonce = 14 [default = 0]; } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index bb2470c..3e5d909 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -305,7 +305,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { SnapshotManager snapshotManager; // monitor for distributed procedures MasterProcedureManagerHost mpmHost; - + // it is assigned after 'initialized' guard set to true, so should be volatile private volatile MasterQuotaManager quotaManager; @@ -355,7 +355,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { conf.getLong("hbase.master.buffer.for.rs.fatals", 1*1024*1024)); LOG.info("hbase.rootdir=" + FSUtils.getRootDir(this.conf) + - ", hbase.cluster.distributed=" + this.conf.getBoolean(HConstants.CLUSTER_DISTRIBUTED, false)); + ", hbase.cluster.distributed=" + this.conf.getBoolean(HConstants.CLUSTER_DISTRIBUTED, false)); // Disable usage of meta replicas in the 
master this.conf.setBoolean(HConstants.USE_META_REPLICAS, false); @@ -1435,11 +1435,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public long createTable( - final HTableDescriptor hTableDescriptor, - final byte [][] splitKeys, - final long nonceGroup, - final long nonce) throws IOException { + public long createTable(HTableDescriptor hTableDescriptor, + byte [][] splitKeys) throws IOException { if (isStopped()) { throw new MasterNotRunningException(); } @@ -1460,10 +1457,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server { // TableExistsException by saying if the schema is the same or not. ProcedurePrepareLatch latch = ProcedurePrepareLatch.createLatch(); long procId = this.procedureExecutor.submitProcedure( - new CreateTableProcedure( - procedureExecutor.getEnvironment(), hTableDescriptor, newRegions, latch), - nonceGroup, - nonce); + new CreateTableProcedure(procedureExecutor.getEnvironment(), + hTableDescriptor, newRegions, latch)); latch.await(); if (cpHost != null) { @@ -1701,10 +1696,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public long deleteTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long deleteTable(final TableName tableName) throws IOException { checkInitialized(); if (cpHost != null) { cpHost.preDeleteTable(tableName); @@ -1714,9 +1706,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { // TODO: We can handle/merge duplicate request ProcedurePrepareLatch latch = ProcedurePrepareLatch.createLatch(); long procId = this.procedureExecutor.submitProcedure( - new DeleteTableProcedure(procedureExecutor.getEnvironment(), tableName, latch), - nonceGroup, - nonce); + new DeleteTableProcedure(procedureExecutor.getEnvironment(), tableName, latch)); latch.await(); if (cpHost != null) { @@ -1727,11 +1717,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public void truncateTable( - final TableName tableName, - final boolean preserveSplits, - final long nonceGroup, - final long nonce) throws IOException { + public void truncateTable(TableName tableName, boolean preserveSplits) throws IOException { checkInitialized(); if (cpHost != null) { cpHost.preTruncateTable(tableName); @@ -1739,9 +1725,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { LOG.info(getClientIdAuditPrefix() + " truncate " + tableName); long procId = this.procedureExecutor.submitProcedure( - new TruncateTableProcedure(procedureExecutor.getEnvironment(), tableName, preserveSplits), - nonceGroup, - nonce); + new TruncateTableProcedure(procedureExecutor.getEnvironment(), tableName, preserveSplits)); ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId); if (cpHost != null) { @@ -1750,11 +1734,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public void addColumn( - final TableName tableName, - final HColumnDescriptor columnDescriptor, - final long nonceGroup, - final long nonce) + public void addColumn(final TableName tableName, final HColumnDescriptor columnDescriptor) throws IOException { checkInitialized(); checkCompression(columnDescriptor); @@ -1765,10 +1745,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } } // Execute the operation synchronously - wait for the operation to complete before continuing. 
- long procId = this.procedureExecutor.submitProcedure( - new AddColumnFamilyProcedure(procedureExecutor.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); + long procId = + this.procedureExecutor.submitProcedure(new AddColumnFamilyProcedure(procedureExecutor + .getEnvironment(), tableName, columnDescriptor)); ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId); if (cpHost != null) { cpHost.postAddColumn(tableName, columnDescriptor); @@ -1776,11 +1755,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public void modifyColumn( - final TableName tableName, - final HColumnDescriptor descriptor, - final long nonceGroup, - final long nonce) + public void modifyColumn(TableName tableName, HColumnDescriptor descriptor) throws IOException { checkInitialized(); checkCompression(descriptor); @@ -1793,10 +1768,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server { LOG.info(getClientIdAuditPrefix() + " modify " + descriptor); // Execute the operation synchronously - wait for the operation to complete before continuing. - long procId = this.procedureExecutor.submitProcedure( - new ModifyColumnFamilyProcedure(procedureExecutor.getEnvironment(), tableName, descriptor), - nonceGroup, - nonce); + long procId = + this.procedureExecutor.submitProcedure(new ModifyColumnFamilyProcedure(procedureExecutor + .getEnvironment(), tableName, descriptor)); ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId); if (cpHost != null) { @@ -1805,11 +1779,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public void deleteColumn( - final TableName tableName, - final byte[] columnName, - final long nonceGroup, - final long nonce) + public void deleteColumn(final TableName tableName, final byte[] columnName) throws IOException { checkInitialized(); if (cpHost != null) { @@ -1820,10 +1790,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server { LOG.info(getClientIdAuditPrefix() + " delete " + Bytes.toString(columnName)); // Execute the operation synchronously - wait for the operation to complete before continuing. 
- long procId = this.procedureExecutor.submitProcedure( - new DeleteColumnFamilyProcedure(procedureExecutor.getEnvironment(), tableName, columnName), - nonceGroup, - nonce); + long procId = + this.procedureExecutor.submitProcedure(new DeleteColumnFamilyProcedure(procedureExecutor + .getEnvironment(), tableName, columnName)); ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId); if (cpHost != null) { @@ -1832,10 +1801,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public long enableTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long enableTable(final TableName tableName) throws IOException { checkInitialized(); if (cpHost != null) { cpHost.preEnableTable(tableName); @@ -1844,10 +1810,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server { // Execute the operation asynchronously - client will check the progress of the operation final ProcedurePrepareLatch prepareLatch = ProcedurePrepareLatch.createLatch(); - long procId = this.procedureExecutor.submitProcedure( - new EnableTableProcedure(procedureExecutor.getEnvironment(), tableName, false, prepareLatch), - nonceGroup, - nonce); + long procId = + this.procedureExecutor.submitProcedure(new EnableTableProcedure(procedureExecutor + .getEnvironment(), tableName, false, prepareLatch)); // Before returning to client, we want to make sure that the table is prepared to be // enabled (the table is locked and the table state is set). // @@ -1862,10 +1827,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public long disableTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long disableTable(final TableName tableName) throws IOException { checkInitialized(); if (cpHost != null) { cpHost.preDisableTable(tableName); @@ -1875,10 +1837,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server { // Execute the operation asynchronously - client will check the progress of the operation final ProcedurePrepareLatch prepareLatch = ProcedurePrepareLatch.createLatch(); // Execute the operation asynchronously - client will check the progress of the operation - long procId = this.procedureExecutor.submitProcedure( - new DisableTableProcedure(procedureExecutor.getEnvironment(), tableName, false, prepareLatch), - nonceGroup, - nonce); + long procId = + this.procedureExecutor.submitProcedure(new DisableTableProcedure(procedureExecutor + .getEnvironment(), tableName, false, prepareLatch)); // Before returning to client, we want to make sure that the table is prepared to be // enabled (the table is locked and the table state is set). // @@ -1929,11 +1890,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { } @Override - public void modifyTable( - final TableName tableName, - final HTableDescriptor descriptor, - final long nonceGroup, - final long nonce) + public void modifyTable(final TableName tableName, final HTableDescriptor descriptor) throws IOException { checkInitialized(); sanityCheckTableDescriptor(descriptor); @@ -1945,9 +1902,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { // Execute the operation synchronously - wait for the operation completes before continuing. 
long procId = this.procedureExecutor.submitProcedure( - new ModifyTableProcedure(procedureExecutor.getEnvironment(), descriptor), - nonceGroup, - nonce); + new ModifyTableProcedure(procedureExecutor.getEnvironment(), descriptor)); ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId); @@ -2102,7 +2057,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { public MasterCoprocessorHost getMasterCoprocessorHost() { return cpHost; } - + @Override public MasterQuotaManager getMasterQuotaManager() { return quotaManager; http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index fb45090..27fab4f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -347,11 +347,8 @@ public class MasterRpcServices extends RSRpcServices public AddColumnResponse addColumn(RpcController controller, AddColumnRequest req) throws ServiceException { try { - master.addColumn( - ProtobufUtil.toTableName(req.getTableName()), - HColumnDescriptor.convert(req.getColumnFamilies()), - req.getNonceGroup(), - req.getNonce()); + master.addColumn(ProtobufUtil.toTableName(req.getTableName()), + HColumnDescriptor.convert(req.getColumnFamilies())); } catch (IOException ioe) { throw new ServiceException(ioe); } @@ -418,8 +415,7 @@ public class MasterRpcServices extends RSRpcServices HTableDescriptor hTableDescriptor = HTableDescriptor.convert(req.getTableSchema()); byte [][] splitKeys = ProtobufUtil.getSplitKeysArray(req); try { - long procId = - master.createTable(hTableDescriptor, splitKeys, req.getNonceGroup(), req.getNonce()); + long procId = master.createTable(hTableDescriptor, splitKeys); return CreateTableResponse.newBuilder().setProcId(procId).build(); } catch (IOException ioe) { throw new ServiceException(ioe); @@ -430,11 +426,8 @@ public class MasterRpcServices extends RSRpcServices public DeleteColumnResponse deleteColumn(RpcController controller, DeleteColumnRequest req) throws ServiceException { try { - master.deleteColumn( - ProtobufUtil.toTableName(req.getTableName()), - req.getColumnName().toByteArray(), - req.getNonceGroup(), - req.getNonce()); + master.deleteColumn(ProtobufUtil.toTableName(req.getTableName()), + req.getColumnName().toByteArray()); } catch (IOException ioe) { throw new ServiceException(ioe); } @@ -478,8 +471,7 @@ public class MasterRpcServices extends RSRpcServices public DeleteTableResponse deleteTable(RpcController controller, DeleteTableRequest request) throws ServiceException { try { - long procId = master.deleteTable(ProtobufUtil.toTableName( - request.getTableName()), request.getNonceGroup(), request.getNonce()); + long procId = master.deleteTable(ProtobufUtil.toTableName(request.getTableName())); return DeleteTableResponse.newBuilder().setProcId(procId).build(); } catch (IOException ioe) { throw new ServiceException(ioe); @@ -490,11 +482,8 @@ public class MasterRpcServices extends RSRpcServices public TruncateTableResponse truncateTable(RpcController controller, TruncateTableRequest request) throws ServiceException { try { - master.truncateTable( - 
ProtobufUtil.toTableName(request.getTableName()), - request.getPreserveSplits(), - request.getNonceGroup(), - request.getNonce()); + master.truncateTable(ProtobufUtil.toTableName(request.getTableName()), + request.getPreserveSplits()); } catch (IOException ioe) { throw new ServiceException(ioe); } @@ -505,10 +494,7 @@ public class MasterRpcServices extends RSRpcServices public DisableTableResponse disableTable(RpcController controller, DisableTableRequest request) throws ServiceException { try { - long procId = master.disableTable( - ProtobufUtil.toTableName(request.getTableName()), - request.getNonceGroup(), - request.getNonce()); + long procId = master.disableTable(ProtobufUtil.toTableName(request.getTableName())); return DisableTableResponse.newBuilder().setProcId(procId).build(); } catch (IOException ioe) { throw new ServiceException(ioe); @@ -594,10 +580,7 @@ public class MasterRpcServices extends RSRpcServices public EnableTableResponse enableTable(RpcController controller, EnableTableRequest request) throws ServiceException { try { - long procId = master.enableTable( - ProtobufUtil.toTableName(request.getTableName()), - request.getNonceGroup(), - request.getNonce()); + long procId = master.enableTable(ProtobufUtil.toTableName(request.getTableName())); return EnableTableResponse.newBuilder().setProcId(procId).build(); } catch (IOException ioe) { throw new ServiceException(ioe); @@ -1067,11 +1050,8 @@ public class MasterRpcServices extends RSRpcServices public ModifyColumnResponse modifyColumn(RpcController controller, ModifyColumnRequest req) throws ServiceException { try { - master.modifyColumn( - ProtobufUtil.toTableName(req.getTableName()), - HColumnDescriptor.convert(req.getColumnFamilies()), - req.getNonceGroup(), - req.getNonce()); + master.modifyColumn(ProtobufUtil.toTableName(req.getTableName()), + HColumnDescriptor.convert(req.getColumnFamilies())); } catch (IOException ioe) { throw new ServiceException(ioe); } @@ -1094,11 +1074,8 @@ public class MasterRpcServices extends RSRpcServices public ModifyTableResponse modifyTable(RpcController controller, ModifyTableRequest req) throws ServiceException { try { - master.modifyTable( - ProtobufUtil.toTableName(req.getTableName()), - HTableDescriptor.convert(req.getTableSchema()), - req.getNonceGroup(), - req.getNonce()); + master.modifyTable(ProtobufUtil.toTableName(req.getTableName()), + HTableDescriptor.convert(req.getTableSchema())); } catch (IOException ioe) { throw new ServiceException(ioe); } http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java index c22f0ed..dd64bc8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java @@ -98,125 +98,75 @@ public interface MasterServices extends Server { * Create a table using the given table definition. * @param desc The table definition * @param splitKeys Starting row keys for the initial table regions. If null - * @param nonceGroup - * @param nonce * a single region is created. 
*/ - long createTable( - final HTableDescriptor desc, - final byte[][] splitKeys, - final long nonceGroup, - final long nonce) throws IOException; + long createTable(HTableDescriptor desc, byte[][] splitKeys) + throws IOException; /** * Delete a table * @param tableName The table name - * @param nonceGroup - * @param nonce * @throws IOException */ - long deleteTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException; + long deleteTable(final TableName tableName) throws IOException; /** * Truncate a table * @param tableName The table name * @param preserveSplits True if the splits should be preserved - * @param nonceGroup - * @param nonce * @throws IOException */ - public void truncateTable( - final TableName tableName, - final boolean preserveSplits, - final long nonceGroup, - final long nonce) throws IOException; + public void truncateTable(final TableName tableName, boolean preserveSplits) throws IOException; /** * Modify the descriptor of an existing table * @param tableName The table name * @param descriptor The updated table descriptor - * @param nonceGroup - * @param nonce * @throws IOException */ - void modifyTable( - final TableName tableName, - final HTableDescriptor descriptor, - final long nonceGroup, - final long nonce) + void modifyTable(final TableName tableName, final HTableDescriptor descriptor) throws IOException; /** * Enable an existing table * @param tableName The table name - * @param nonceGroup - * @param nonce * @throws IOException */ - long enableTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException; + long enableTable(final TableName tableName) throws IOException; /** * Disable an existing table * @param tableName The table name - * @param nonceGroup - * @param nonce * @throws IOException */ - long disableTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException; + long disableTable(final TableName tableName) throws IOException; /** * Add a new column to an existing table * @param tableName The table name * @param column The column definition - * @param nonceGroup - * @param nonce * @throws IOException */ - void addColumn( - final TableName tableName, - final HColumnDescriptor column, - final long nonceGroup, - final long nonce) + void addColumn(final TableName tableName, final HColumnDescriptor column) throws IOException; /** * Modify the column descriptor of an existing column in an existing table * @param tableName The table name * @param descriptor The updated column definition - * @param nonceGroup - * @param nonce * @throws IOException */ - void modifyColumn( - final TableName tableName, - final HColumnDescriptor descriptor, - final long nonceGroup, - final long nonce) + void modifyColumn(TableName tableName, HColumnDescriptor descriptor) throws IOException; /** * Delete a column from an existing table * @param tableName The table name * @param columnName The column name - * @param nonceGroup - * @param nonce * @throws IOException */ - void deleteColumn( - final TableName tableName, - final byte[] columnName, - final long nonceGroup, - final long nonce) + void deleteColumn(final TableName tableName, final byte[] columnName) throws IOException; /** http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java ---------------------------------------------------------------------- diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java index b2b656b..0d974b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.apache.hadoop.hbase.util.NonceKey; import com.google.common.annotations.VisibleForTesting; @@ -104,6 +103,38 @@ public class ServerNonceManager { } /** + * This implementation is not smart and just treats nonce group and nonce as random bits. + */ + // TODO: we could use pure byte arrays, but then we wouldn't be able to use hash map. + private static class NonceKey { + private long group; + private long nonce; + + public NonceKey(long group, long nonce) { + assert nonce != HConstants.NO_NONCE; + this.group = group; + this.nonce = nonce; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || !(obj instanceof NonceKey)) return false; + NonceKey nk = ((NonceKey)obj); + return this.nonce == nk.nonce && this.group == nk.group; + } + + @Override + public int hashCode() { + return (int)((group >> 32) ^ group ^ (nonce >> 32) ^ nonce); + } + + @Override + public String toString() { + return "[" + group + ":" + nonce + "]"; + } + } + + /** * Nonces. * Approximate overhead per nonce: 64 bytes from hashmap, 32 from two objects (k/v), * NK: 16 bytes (2 longs), OC: 8 bytes (1 long) - so, 120 bytes. http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 4ccf4dc..131ff14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -136,9 +136,7 @@ public class AccessControlLists { // Set cache data blocks in L1 if more than one cache tier deployed; e.g. this will // be the case if we are using CombinedBlockCache (Bucket Cache). 
.setCacheDataInL1(true)), - null, - HConstants.NO_NONCE, - HConstants.NO_NONCE); + null); } /** http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 7d3b82e..442f7ea 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -198,7 +198,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements DisabledRegionSplitPolicy.class.getName()); labelsTable.setValue(Bytes.toBytes(HConstants.DISALLOW_WRITES_IN_RECOVERING), Bytes.toBytes(true)); - master.createTable(labelsTable, null, HConstants.NO_NONCE, HConstants.NO_NONCE); + master.createTable(labelsTable, null); } } http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java index 19774d7..6672eb2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java @@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest; -import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Matchers; @@ -67,10 +66,6 @@ public class TestHBaseAdminNoCluster { * @throws MasterNotRunningException * @throws ServiceException */ - //TODO: Clean up, with Procedure V2 and nonce to prevent the same procedure to call mulitple - // time, this test is invalid anymore. Just keep the test around for some time before - // fully removing it. 
- @Ignore @Test public void testMasterMonitorCallableRetries() throws MasterNotRunningException, ZooKeeperConnectionException, IOException, ServiceException { http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java index 343cd4c..29766b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java @@ -1154,7 +1154,7 @@ public class TestAssignmentManagerOnCluster { assertEquals(oldServerName, regionStates.getRegionServerOfRegion(hri)); // Disable the table now. - master.disableTable(hri.getTable(), HConstants.NO_NONCE, HConstants.NO_NONCE); + master.disableTable(hri.getTable()); // Kill the hosting server, which doesn't have meta on it. cluster.killRegionServer(oldServerName); http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index fbdf6a2..053dc99 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -224,11 +224,8 @@ public class TestCatalogJanitor { } @Override - public long createTable( - final HTableDescriptor desc, - final byte[][] splitKeys, - final long nonceGroup, - final long nonce) throws IOException { + public long createTable(HTableDescriptor desc, byte[][] splitKeys) + throws IOException { // no-op return -1; } @@ -257,7 +254,7 @@ public class TestCatalogJanitor { public MasterCoprocessorHost getMasterCoprocessorHost() { return null; } - + @Override public MasterQuotaManager getMasterQuotaManager() { return null; @@ -412,64 +409,39 @@ public class TestCatalogJanitor { } @Override - public long deleteTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long deleteTable(TableName tableName) throws IOException { return -1; } - public void truncateTable( - final TableName tableName, - final boolean preserveSplits, - final long nonceGroup, - final long nonce) throws IOException { - } + @Override + public void truncateTable(TableName tableName, boolean preserveSplits) throws IOException { } + @Override - public void modifyTable( - final TableName tableName, - final HTableDescriptor descriptor, - final long nonceGroup, - final long nonce) throws IOException { - } + public void modifyTable(TableName tableName, HTableDescriptor descriptor) + throws IOException { } @Override - public long enableTable( - final TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long enableTable(TableName tableName) throws IOException { return -1; } @Override - public long disableTable( - TableName tableName, - final long nonceGroup, - final long nonce) throws IOException { + public long disableTable(TableName tableName) throws 
IOException { return -1; } @Override - public void addColumn( - final TableName tableName, - final HColumnDescriptor columnDescriptor, - final long nonceGroup, - final long nonce) throws IOException { } + public void addColumn(TableName tableName, HColumnDescriptor column) + throws IOException { } @Override - public void modifyColumn( - final TableName tableName, - final HColumnDescriptor descriptor, - final long nonceGroup, - final long nonce) throws IOException { } + public void modifyColumn(TableName tableName, HColumnDescriptor descriptor) + throws IOException { } @Override - public void deleteColumn( - final TableName tableName, - final byte[] columnName, - final long nonceGroup, - final long nonce) throws IOException { } + public void deleteColumn(TableName tableName, byte[] columnName) + throws IOException { } @Override public TableLockManager getTableLockManager() { http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java index 77ca96e..00f82f4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java @@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.TableStateManager; import org.apache.hadoop.hbase.client.BufferedMutator; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Durability; -import org.apache.hadoop.hbase.client.NonceGenerator; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.MetaScanner; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; @@ -426,14 +425,6 @@ public class MasterProcedureTestingUtility { return put; } - public static long generateNonceGroup(final HMaster master) { - return master.getConnection().getNonceGenerator().getNonceGroup(); - } - - public static long generateNonce(final HMaster master) { - return master.getConnection().getNonceGenerator().newNonce(); - } - public static class InjectAbortOnLoadListener implements ProcedureExecutor.ProcedureExecutorListener { private final ProcedureExecutor procExec; http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java index e09c838..05eb602 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java @@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import 
org.apache.hadoop.hbase.InvalidFamilyOperationException; import org.apache.hadoop.hbase.TableName; @@ -47,9 +46,6 @@ public class TestAddColumnFamilyProcedure { protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private static long nonceGroup = HConstants.NO_NONCE; - private static long nonce = HConstants.NO_NONCE; - private static void setupConf(Configuration conf) { conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1); } @@ -72,9 +68,6 @@ public class TestAddColumnFamilyProcedure { @Before public void setup() throws Exception { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false); - nonceGroup = - MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster()); - nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster()); } @After @@ -98,10 +91,9 @@ public class TestAddColumnFamilyProcedure { MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f3"); // Test 1: Add a column family online - long procId1 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor1), - nonceGroup, - nonce); + long procId1 = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor1)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId1); ProcedureTestingUtility.assertProcNotFailed(procExec, procId1); @@ -111,10 +103,9 @@ public class TestAddColumnFamilyProcedure { // Test 2: Add a column family offline UTIL.getHBaseAdmin().disableTable(tableName); - long procId2 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor2), - nonceGroup + 1, - nonce + 1); + long procId2 = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor2)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId2); ProcedureTestingUtility.assertProcNotFailed(procExec, procId2); @@ -133,10 +124,9 @@ public class TestAddColumnFamilyProcedure { MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1"); // add the column family - long procId1 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); + long procId1 = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId1); ProcedureTestingUtility.assertProcNotFailed(procExec, procId1); @@ -144,10 +134,9 @@ public class TestAddColumnFamilyProcedure { tableName, cf2); // add the column family that exists - long procId2 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup + 1, - nonce + 1); + long procId2 = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId2); @@ -159,10 +148,9 @@ public class TestAddColumnFamilyProcedure { // Do the same add the existing column family - this time offline UTIL.getHBaseAdmin().disableTable(tableName); - long procId3 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup + 2, - nonce + 2); + long procId3 = + 
procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId3); @@ -173,37 +161,6 @@ public class TestAddColumnFamilyProcedure { assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException); } - @Test(timeout=60000) - public void testAddSameColumnFamilyTwiceWithSameNonce() throws Exception { - final TableName tableName = TableName.valueOf("testAddSameColumnFamilyTwiceWithSameNonce"); - final String cf2 = "cf2"; - final HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf2); - - final ProcedureExecutor procExec = getMasterProcedureExecutor(); - - MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1"); - - // add the column family - long procId1 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); - long procId2 = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); - // Wait the completion - ProcedureTestingUtility.waitProcedure(procExec, procId1); - ProcedureTestingUtility.assertProcNotFailed(procExec, procId1); - MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(), - tableName, cf2); - - // Wait the completion and expect not fail - because it is the same proc - ProcedureTestingUtility.waitProcedure(procExec, procId2); - ProcedureTestingUtility.assertProcNotFailed(procExec, procId2); - assertTrue(procId1 == procId2); - } - @Test(timeout = 60000) public void testRecoveryAndDoubleExecutionOffline() throws Exception { final TableName tableName = TableName.valueOf("testRecoveryAndDoubleExecutionOffline"); @@ -218,10 +175,9 @@ public class TestAddColumnFamilyProcedure { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); // Start the AddColumnFamily procedure && kill the executor - long procId = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); + long procId = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); // Restart the executor and execute the step twice int numberOfSteps = AddColumnFamilyState.values().length; @@ -245,10 +201,9 @@ public class TestAddColumnFamilyProcedure { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); // Start the AddColumnFamily procedure && kill the executor - long procId = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); + long procId = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); // Restart the executor and execute the step twice int numberOfSteps = AddColumnFamilyState.values().length; @@ -272,10 +227,9 @@ public class TestAddColumnFamilyProcedure { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); // Start the AddColumnFamily procedure && kill the executor - long procId = procExec.submitProcedure( - new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, columnDescriptor), - nonceGroup, - nonce); + long procId = + procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName, + columnDescriptor)); int numberOfSteps = 
http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
index 73843e0..21129f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableExistsException;
@@ -35,6 +34,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.CreateTa
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -42,7 +42,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 @Category(MediumTests.class)
 public class TestCreateTableProcedure {
@@ -50,9 +53,6 @@ public class TestCreateTableProcedure {
 
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 
-  private static long nonceGroup = HConstants.NO_NONCE;
-  private static long nonce = HConstants.NO_NONCE;
-
   private static void setupConf(Configuration conf) {
     conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
   }
@@ -75,9 +75,6 @@ public class TestCreateTableProcedure {
   @Before
   public void setup() throws Exception {
     resetProcExecutorTestingKillFlag();
-    nonceGroup =
-        MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster());
-    nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster());
   }
 
   @After
@@ -127,14 +124,12 @@
 
     // create the table
     long procId1 = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
+      new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
     // create another with the same name
     ProcedurePrepareLatch latch2 = new ProcedurePrepareLatch.CompatibilityLatch();
     long procId2 = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions, latch2),
-      nonceGroup + 1,
-      nonce + 1);
+      new CreateTableProcedure(procExec.getEnvironment(), htd, regions, latch2));
 
     ProcedureTestingUtility.waitProcedure(procExec, procId1);
     ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId1));
@@ -144,29 +139,6 @@
   }
 
   @Test(timeout=60000)
-  public void testCreateTwiceWithSameNonce() throws Exception {
-    final TableName tableName = TableName.valueOf("testCreateTwiceWithSameNonce");
-    final ProcedureExecutor procExec = getMasterProcedureExecutor();
-    final HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, "f");
-    final HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, null);
-
-    // create the table
-    long procId1 = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
-
-    // create another with the same name
-    long procId2 = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
-
-    ProcedureTestingUtility.waitProcedure(procExec, procId1);
-    ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId1));
-
-    ProcedureTestingUtility.waitProcedure(procExec, procId2);
-    ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId2));
-    assertTrue(procId1 == procId2);
-  }
-
-  @Test(timeout=60000)
   public void testRecoveryAndDoubleExecution() throws Exception {
     final TableName tableName = TableName.valueOf("testRecoveryAndDoubleExecution");
 
@@ -179,7 +151,7 @@ public class TestCreateTableProcedure {
     HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, "f1", "f2");
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, splitKeys);
     long procId = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
+      new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
 
     // Restart the executor and execute the step twice
     // NOTE: the 6 (number of CreateTableState steps) is hardcoded,
@@ -207,7 +179,7 @@ public class TestCreateTableProcedure {
     htd.setRegionReplication(3);
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, splitKeys);
     long procId = procExec.submitProcedure(
-      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
+      new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
 
     // NOTE: the 4 (number of CreateTableState steps) is hardcoded,
     // so you have to look at this test at least once when you add a new step.
@@ -237,7 +209,7 @@ public class TestCreateTableProcedure {
     HTableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, "f1", "f2");
     HRegionInfo[] regions = ModifyRegionUtils.createHRegionInfos(htd, splitKeys);
     long procId = procExec.submitProcedure(
-      new FaultyCreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
+      new FaultyCreateTableProcedure(procExec.getEnvironment(), htd, regions));
 
     // NOTE: the 4 (number of CreateTableState steps) is hardcoded,
     // so you have to look at this test at least once when you add a new step.
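The removed testCreateTwiceWithSameNonce pinned down the contract this revert takes out: two submits carrying the same (nonceGroup, nonce) pair collapse onto one procedure, so both handles resolve to the same procId and neither fails. A condensed sketch of that removed assertion, assuming the htd, regions and procExec fixtures from the surrounding test (pre-revert code only, it does not compile after this patch):

    // Illustrative sketch of the removed nonce-deduplication check.
    long procId1 = procExec.submitProcedure(
      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
    long procId2 = procExec.submitProcedure(
      new CreateTableProcedure(procExec.getEnvironment(), htd, regions), nonceGroup, nonce);
    ProcedureTestingUtility.waitProcedure(procExec, procId1);
    ProcedureTestingUtility.waitProcedure(procExec, procId2);
    // Same nonce pair => the second submit is deduplicated onto the first procedure.
    assertTrue(procId1 == procId2);

After the revert only the latch-based double-create path above remains, where the second submission is a genuinely separate procedure whose outcome the test checks on its own.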
http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
index e7efc2b..5aec002 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
@@ -24,7 +23,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.InvalidFamilyOperationException;
@@ -47,9 +46,6 @@ public class TestDeleteColumnFamilyProcedure {
 
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 
-  private static long nonceGroup = HConstants.NO_NONCE;
-  private static long nonce = HConstants.NO_NONCE;
-
   private static void setupConf(Configuration conf) {
     conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
   }
@@ -72,9 +68,6 @@ public class TestDeleteColumnFamilyProcedure {
   @Before
   public void setup() throws Exception {
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false);
-    nonceGroup =
-        MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster());
-    nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster());
   }
 
   @After
@@ -96,10 +89,9 @@
     MasterProcedureTestingUtility.createTable(procExec, tableName, null, cf1, cf2, "f3");
 
     // Test 1: delete the column family that exists online
-    long procId1 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf1.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId1 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf1.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId1);
     ProcedureTestingUtility.assertProcNotFailed(procExec, procId1);
@@ -109,10 +101,9 @@
 
     // Test 2: delete the column family that exists offline
    UTIL.getHBaseAdmin().disableTable(tableName);
-    long procId2 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId2 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf2.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId2);
     ProcedureTestingUtility.assertProcNotFailed(procExec, procId2);
@@ -128,10 +119,9 @@
     MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", cf2);
 
     // delete the column family that exists
-    long procId1 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId1 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf2.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId1);
     // First delete should succeed
@@ -141,10 +131,9 @@
       tableName, cf2);
 
     // delete the column family that does not exist
-    long procId2 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup + 1,
-      nonce + 1);
+    long procId2 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf2.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId2);
 
@@ -157,10 +146,9 @@
 
     // Try again, this time with table disabled.
     UTIL.getHBaseAdmin().disableTable(tableName);
-    long procId3 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup + 2,
-      nonce + 2);
+    long procId3 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf2.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId3);
     // Expect fail with InvalidFamilyOperationException
@@ -171,37 +159,6 @@
   }
 
   @Test(timeout=60000)
-  public void testDeleteColumnFamilyTwiceWithSameNonce() throws Exception {
-    final TableName tableName = TableName.valueOf("testDeleteColumnFamilyTwiceWithSameNonce");
-    final ProcedureExecutor procExec = getMasterProcedureExecutor();
-
-    final String cf2 = "cf2";
-
-    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", cf2);
-
-    // delete the column family that exists
-    long procId1 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup,
-      nonce);
-    long procId2 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf2.getBytes()),
-      nonceGroup,
-      nonce);
-
-    // Wait the completion
-    ProcedureTestingUtility.waitProcedure(procExec, procId1);
-    ProcedureTestingUtility.assertProcNotFailed(procExec, procId1);
-    MasterProcedureTestingUtility.validateColumnFamilyDeletion(UTIL.getHBaseCluster().getMaster(),
-      tableName, cf2);
-
-    // Wait the completion and expect not fail - because it is the same proc
-    ProcedureTestingUtility.waitProcedure(procExec, procId2);
-    ProcedureTestingUtility.assertProcNotFailed(procExec, procId2);
-    assertTrue(procId1 == procId2);
-  }
-
-  @Test(timeout=60000)
   public void testDeleteNonExistingColumnFamily() throws Exception {
     final TableName tableName = TableName.valueOf("testDeleteNonExistingColumnFamily");
     final ProcedureExecutor procExec = getMasterProcedureExecutor();
@@ -211,10 +168,9 @@
     MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", "f2");
 
     // delete the column family that does not exist
-    long procId1 = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf3.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId1 =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf3.getBytes()));
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId1);
 
@@ -238,10 +194,9 @@
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
 
     // Start the Delete procedure && kill the executor
-    long procId = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf4.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf4.getBytes()));
 
     // Restart the executor and execute the step twice
     int numberOfSteps = DeleteColumnFamilyState.values().length;
@@ -265,10 +220,9 @@
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
 
     // Start the Delete procedure && kill the executor
-    long procId = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()),
-      nonceGroup,
-      nonce);
+    long procId =
+        procExec.submitProcedure(new DeleteColumnFamilyProcedure(procExec.getEnvironment(),
+          tableName, cf5.getBytes()));
 
     // Restart the executor and execute the step twice
     int numberOfSteps = DeleteColumnFamilyState.values().length;
@@ -294,9 +248,7 @@
 
     // Start the Delete procedure && kill the executor
     long procId = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()),
-      nonceGroup,
-      nonce);
+      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()));
 
     // Failing before DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT we should trigger the rollback
     // NOTE: the 1 (number before DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT step) is hardcoded,
@@ -327,9 +279,7 @@
 
     // Start the Delete procedure && kill the executor
     long procId = procExec.submitProcedure(
-      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()),
-      nonceGroup,
-      nonce);
+      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()));
 
     // Failing after DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT we should not trigger the rollback.
     // NOTE: the 4 (number of DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT + 1 step) is hardcoded,
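As in the other files, the recovery and rollback tests keep their kill-and-restart structure; only the submit call loses its nonce arguments. A trimmed sketch of the pattern these hunks show, assuming the procExec, tableName and cf5 fixtures defined earlier in each test (the step-replaying helpers that follow are unchanged by this patch and are omitted here):

    // Illustrative sketch only; mirrors the post-revert call sites above.
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);

    // Start the Delete procedure && kill the executor
    long procId = procExec.submitProcedure(
      new DeleteColumnFamilyProcedure(procExec.getEnvironment(), tableName, cf5.getBytes()));

    // One replay step per DeleteColumnFamilyState value
    int numberOfSteps = DeleteColumnFamilyState.values().length;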
http://git-wip-us.apache.org/repos/asf/hbase/blob/67404e7e/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
index a215c21..dc1d90a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
@@ -22,7 +21,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
@@ -34,6 +33,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteTableState;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
+
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -41,7 +41,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 @Category(MediumTests.class)
 public class TestDeleteTableProcedure {
@@ -49,9 +52,6 @@ public class TestDeleteTableProcedure {
 
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 
-  private long nonceGroup = HConstants.NO_NONCE;
-  private long nonce = HConstants.NO_NONCE;
-
   private static void setupConf(Configuration conf) {
     conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
   }
@@ -76,10 +76,6 @@
     final ProcedureExecutor procExec = getMasterProcedureExecutor();
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
     assertTrue("expected executor to be running", procExec.isRunning());
-
-    nonceGroup =
-        MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster());
-    nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster());
   }
 
   @After
@@ -126,10 +122,10 @@
 
     // delete the table (that exists)
     long procId1 = procExec.submitProcedure(
-      new DeleteTableProcedure(procExec.getEnvironment(), tableName), nonceGroup, nonce);
+      new DeleteTableProcedure(procExec.getEnvironment(), tableName));
     // delete the table (that will no longer exist)
     long procId2 = procExec.submitProcedure(
-      new DeleteTableProcedure(procExec.getEnvironment(), tableName), nonceGroup + 1, nonce + 1);
+      new DeleteTableProcedure(procExec.getEnvironment(), tableName));
 
     // Wait the completion
     ProcedureTestingUtility.waitProcedure(procExec, procId1);
@@ -148,36 +144,6 @@
   }
 
   @Test(timeout=60000)
-  public void testDoubleDeletedTableWithSameNonce() throws Exception {
-    final TableName tableName = TableName.valueOf("testDoubleDeletedTableWithSameNonce");
-    final ProcedureExecutor procExec = getMasterProcedureExecutor();
-
-    HRegionInfo[] regions = MasterProcedureTestingUtility.createTable(
-      procExec, tableName, null, "f");
-    UTIL.getHBaseAdmin().disableTable(tableName);
-
-    // delete the table (that exists)
-    long procId1 = procExec.submitProcedure(
-      new DeleteTableProcedure(procExec.getEnvironment(), tableName), nonceGroup, nonce);
-    // delete the table (that will no longer exist)
-    long procId2 = procExec.submitProcedure(
-      new DeleteTableProcedure(procExec.getEnvironment(), tableName), nonceGroup, nonce);
-
-    // Wait the completion
-    ProcedureTestingUtility.waitProcedure(procExec, procId1);
-    ProcedureTestingUtility.waitProcedure(procExec, procId2);
-
-    // First delete should succeed
-    ProcedureTestingUtility.assertProcNotFailed(procExec, procId1);
-    MasterProcedureTestingUtility.validateTableDeletion(
-      UTIL.getHBaseCluster().getMaster(), tableName, regions, "f");
-
-    // Second delete should not fail, because it is the same delete
-    ProcedureTestingUtility.assertProcNotFailed(procExec, procId2);
-    assertTrue(procId1 == procId2);
-  }
-
-  @Test(timeout=60000)
   public void testSimpleDelete() throws Exception {
     final TableName tableName = TableName.valueOf("testSimpleDelete");
     final byte[][] splitKeys = null;
@@ -223,7 +189,7 @@
 
     // Start the Delete procedure && kill the executor
     long procId = procExec.submitProcedure(
-      new DeleteTableProcedure(procExec.getEnvironment(), tableName), nonceGroup, nonce);
+      new DeleteTableProcedure(procExec.getEnvironment(), tableName));
 
     // Restart the executor and execute the step twice
     // NOTE: the 6 (number of DeleteTableState steps) is hardcoded,