From: zhz@apache.org
To: common-commits@hadoop.apache.org
Date: Wed, 23 Sep 2015 03:41:14 -0000
Subject: [40/50] [abbrv] hadoop git commit: HDFS-9111. Move hdfs-client protobuf convert methods from PBHelper to PBHelperClient. Contributed by Mingliang Liu.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java
index 6d9dc23..740b5cf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.PBHelper;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.AcceptRecoveryRequestProto;
@@ -161,7 +162,7 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
         .setSegmentTxnId(segmentTxId)
         .setFirstTxnId(firstTxnId)
         .setNumTxns(numTxns)
-        .setRecords(PBHelper.getByteString(records))
+        .setRecords(PBHelperClient.getByteString(records))
         .build();
     try {
       rpcProxy.journal(NULL_CONTROLLER, req);
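The change itself is mechanical: every call site is repointed from the server-side PBHelper to the client-side PBHelperClient, so code shared with hdfs-client no longer pulls in server-only classes. The moved helper referenced above is presumably a small static utility of this shape (a simplified sketch, not the verbatim Hadoop source):

    import com.google.protobuf.ByteString;

    public final class PBHelperClient {
      private PBHelperClient() {}  // static conversion helpers only

      // Wraps raw edit-log record bytes in a protobuf ByteString for the
      // .setRecords(...) call shown in the hunk above.
      public static ByteString getByteString(byte[] bytes) {
        return ByteString.copyFrom(bytes);
      }
    }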
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
index e09ba32..3559065 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
@@ -65,7 +65,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
@@ -1048,7 +1048,7 @@ public final class CacheManager {
       Expiration expiry = info.getExpiration();
       if (expiry != null) {
         assert (!expiry.isRelative());
-        b.setExpiration(PBHelper.convert(expiry));
+        b.setExpiration(PBHelperClient.convert(expiry));
       }
       directives.add(b.build());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
index 7c3c895..0663b8e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -305,7 +305,7 @@ public class EncryptionZoneManager {
     }

     final HdfsProtos.ZoneEncryptionInfoProto proto =
-        PBHelper.convert(suite, version, keyName);
+        PBHelperClient.convert(suite, version, keyName);
     final XAttr ezXAttr = XAttrHelper
         .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());
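Both hunks above lean on the same convention: PBHelperClient exposes overloaded static convert(...) methods, one per (POJO, proto) pair, so every call site reads identically regardless of type. A minimal sketch of the expiration case (proto and accessor names inferred from the call sites above, not copied from the Hadoop source):

    // POJO -> generated protobuf message, field for field.
    public static CacheDirectiveInfoExpirationProto convert(
        CacheDirectiveInfo.Expiration expiration) {
      return CacheDirectiveInfoExpirationProto.newBuilder()
          .setIsRelative(expiration.isRelative())
          .setMillis(expiration.getMillis())
          .build();
    }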
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java
index ced085a..24d3360 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.security.AccessControlException;
@@ -284,7 +283,7 @@ class FSDirXAttrOp {
             HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
         fsd.ezManager.addEncryptionZone(inode.getId(),
             PBHelperClient.convert(ezProto.getSuite()),
-            PBHelper.convert(ezProto.getCryptoProtocolVersion()),
+            PBHelperClient.convert(ezProto.getCryptoProtocolVersion()),
             ezProto.getKeyName());
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index e25e0e0..4dc5326 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -1134,7 +1133,7 @@ public class FSDirectory implements Closeable {
             xattr.getValue());
         ezManager.unprotectedAddEncryptionZone(inode.getId(),
             PBHelperClient.convert(ezProto.getSuite()),
-            PBHelper.convert(ezProto.getCryptoProtocolVersion()),
+            PBHelperClient.convert(ezProto.getCryptoProtocolVersion()),
             ezProto.getKeyName());
       } catch (InvalidProtocolBufferException e) {
         NameNode.LOG.warn("Error parsing protocol buffer of " +
@@ -1261,7 +1260,7 @@ public class FSDirectory implements Closeable {
       throws IOException {
     // Make the PB for the xattr
     final HdfsProtos.PerFileEncryptionInfoProto proto =
-        PBHelper.convertPerFileEncInfo(info);
+        PBHelperClient.convertPerFileEncInfo(info);
     final byte[] protoBytes = proto.toByteArray();
     final XAttr fileEncryptionAttr =
         XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
@@ -1327,7 +1326,7 @@ public class FSDirectory implements Closeable {
       HdfsProtos.PerFileEncryptionInfoProto fileProto =
           HdfsProtos.PerFileEncryptionInfoProto.parseFrom(
               fileXAttr.getValue());
-      return PBHelper.convert(fileProto, suite, version, keyName);
+      return PBHelperClient.convert(fileProto, suite, version, keyName);
     } catch (InvalidProtocolBufferException e) {
       throw new IOException("Could not parse file encryption info for " +
           "inode " + inode, e);
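A pattern worth noting in these two files: the encryption metadata lives inside an extended attribute as serialized protobuf bytes, so the converters sit on both sides of a round trip. Condensed from the hunks above (not a complete Hadoop method):

    // Write side: POJO -> proto -> raw bytes stored as the xattr value.
    final HdfsProtos.ZoneEncryptionInfoProto proto =
        PBHelperClient.convert(suite, version, keyName);
    final XAttr ezXAttr = XAttrHelper
        .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());

    // Read side: xattr bytes -> proto -> POJO fields.
    final HdfsProtos.ZoneEncryptionInfoProto ezProto =
        HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
    ezManager.unprotectedAddEncryptionZone(inode.getId(),
        PBHelperClient.convert(ezProto.getSuite()),
        PBHelperClient.convert(ezProto.getCryptoProtocolVersion()),
        ezProto.getKeyName());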
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index 125e1cf..8a8a6e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -105,7 +105,7 @@ import org.apache.hadoop.hdfs.protocol.LayoutVersion;
 import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
 import org.apache.hadoop.hdfs.protocol.proto.EditLogProtos.AclEditLogProto;
 import org.apache.hadoop.hdfs.protocol.proto.EditLogProtos.XAttrEditLogProto;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.util.XMLUtils;
@@ -410,7 +410,7 @@ public abstract class FSEditLogOp {
         return null;
       }
       XAttrEditLogProto proto = XAttrEditLogProto.parseDelimitedFrom(in);
-      return PBHelper.convertXAttrs(proto.getXAttrsList());
+      return PBHelperClient.convertXAttrs(proto.getXAttrsList());
     }

     @SuppressWarnings("unchecked")
@@ -554,7 +554,7 @@ public abstract class FSEditLogOp {
       if (this.opCode == OP_ADD) {
         AclEditLogUtil.write(aclEntries, out);
         XAttrEditLogProto.Builder b = XAttrEditLogProto.newBuilder();
-        b.addAllXAttrs(PBHelper.convertXAttrProto(xAttrs));
+        b.addAllXAttrs(PBHelperClient.convertXAttrProto(xAttrs));
         b.build().writeDelimitedTo(out);
         FSImageSerialization.writeString(clientName,out);
         FSImageSerialization.writeString(clientMachine,out);
@@ -1631,7 +1631,7 @@ public abstract class FSEditLogOp {
       permissions.write(out);
       AclEditLogUtil.write(aclEntries, out);
       XAttrEditLogProto.Builder b = XAttrEditLogProto.newBuilder();
-      b.addAllXAttrs(PBHelper.convertXAttrProto(xAttrs));
+      b.addAllXAttrs(PBHelperClient.convertXAttrProto(xAttrs));
       b.build().writeDelimitedTo(out);
     }
@@ -4158,7 +4158,7 @@ public abstract class FSEditLogOp {
     void readFields(DataInputStream in, int logVersion) throws IOException {
       XAttrEditLogProto p = XAttrEditLogProto.parseDelimitedFrom(in);
       src = p.getSrc();
-      xAttrs = PBHelper.convertXAttrs(p.getXAttrsList());
+      xAttrs = PBHelperClient.convertXAttrs(p.getXAttrsList());
       readRpcIds(in, logVersion);
     }
@@ -4168,7 +4168,7 @@ public abstract class FSEditLogOp {
       if (src != null) {
         b.setSrc(src);
       }
-      b.addAllXAttrs(PBHelper.convertXAttrProto(xAttrs));
+      b.addAllXAttrs(PBHelperClient.convertXAttrProto(xAttrs));
       b.build().writeDelimitedTo(out);
       // clientId and callId
       writeRpcIds(rpcClientId, rpcCallId, out);
@@ -4211,7 +4211,7 @@ public abstract class FSEditLogOp {
     void readFields(DataInputStream in, int logVersion) throws IOException {
       XAttrEditLogProto p = XAttrEditLogProto.parseDelimitedFrom(in);
       src = p.getSrc();
-      xAttrs = PBHelper.convertXAttrs(p.getXAttrsList());
+      xAttrs = PBHelperClient.convertXAttrs(p.getXAttrsList());
       readRpcIds(in, logVersion);
     }
@@ -4221,7 +4221,7 @@ public abstract class FSEditLogOp {
       if (src != null) {
         b.setSrc(src);
       }
-      b.addAllXAttrs(PBHelper.convertXAttrProto(xAttrs));
+      b.addAllXAttrs(PBHelperClient.convertXAttrProto(xAttrs));
       b.build().writeDelimitedTo(out);
       // clientId and callId
       writeRpcIds(rpcClientId, rpcCallId, out);
@@ -4267,7 +4267,7 @@ public abstract class FSEditLogOp {
         throw new IOException("Failed to read fields from SetAclOp");
       }
       src = p.getSrc();
-      aclEntries = PBHelper.convertAclEntry(p.getEntriesList());
+      aclEntries = PBHelperClient.convertAclEntry(p.getEntriesList());
     }

     @Override
@@ -4275,7 +4275,7 @@ public abstract class FSEditLogOp {
       AclEditLogProto.Builder b = AclEditLogProto.newBuilder();
       if (src != null)
         b.setSrc(src);
-      b.addAllEntries(PBHelper.convertAclEntryProto(aclEntries));
+      b.addAllEntries(PBHelperClient.convertAclEntryProto(aclEntries));
       b.build().writeDelimitedTo(out);
     }
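All of the FSEditLogOp hunks touch the same serialization idiom: each op frames its protobuf record with a varint length prefix so that many ops can share one edit-log stream. The paired calls, extracted from the hunks above:

    // Write: length prefix + payload appended to the shared stream.
    XAttrEditLogProto.Builder b = XAttrEditLogProto.newBuilder();
    b.addAllXAttrs(PBHelperClient.convertXAttrProto(xAttrs));
    b.build().writeDelimitedTo(out);

    // Read: consumes exactly one length-prefixed record from the stream.
    XAttrEditLogProto p = XAttrEditLogProto.parseDelimitedFrom(in);
    xAttrs = PBHelperClient.convertXAttrs(p.getXAttrsList());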
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
index ac88919..0ae739c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
@@ -337,7 +336,7 @@ public final class FSImageFormatPBINode {
       BlockInfo[] blocks = new BlockInfo[bp.size()];
       for (int i = 0, e = bp.size(); i < e; ++i) {
         blocks[i] =
-            new BlockInfoContiguous(PBHelper.convert(bp.get(i)), replication);
+            new BlockInfoContiguous(PBHelperClient.convert(bp.get(i)), replication);
       }
       final PermissionStatus permissions = loadPermission(f.getPermission(),
           parent.getLoaderContext().getStringTable());
@@ -447,7 +446,7 @@ public final class FSImageFormatPBINode {
             XATTR_NAMESPACE_EXT_OFFSET);
         xAttrCompactBuilder.setName(v);
         if (a.getValue() != null) {
-          xAttrCompactBuilder.setValue(PBHelper.getByteString(a.getValue()));
+          xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
         }
         b.addXAttrs(xAttrCompactBuilder.build());
       }
@@ -636,7 +635,7 @@ public final class FSImageFormatPBINode {
       if (n.getBlocks() != null) {
         for (Block block : n.getBlocks()) {
-          b.addBlocks(PBHelper.convert(block));
+          b.addBlocks(PBHelperClient.convert(block));
         }
       }
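The Block conversions used by the fsimage loader and saver above are plain field copies in each direction. The expected shape of the pair, sketched from the call sites (the actual Hadoop methods may differ in detail):

    public static BlockProto convert(Block b) {
      // POJO -> proto: copy id, generation stamp, and length.
      return BlockProto.newBuilder()
          .setBlockId(b.getBlockId())
          .setGenStamp(b.getGenerationStamp())
          .setNumBytes(b.getNumBytes())
          .build();
    }

    public static Block convert(BlockProto b) {
      // proto -> POJO: Block(id, length, generation stamp).
      return new Block(b.getBlockId(), b.getNumBytes(), b.getGenStamp());
    }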
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
index 91ebaaf..cf21411 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -244,7 +244,7 @@ public class FSImageFormatPBSnapshot {
       List<BlockProto> bpl = pbf.getBlocksList();
       BlockInfo[] blocks = new BlockInfo[bpl.size()];
       for(int j = 0, e = bpl.size(); j < e; ++j) {
-        Block blk = PBHelper.convert(bpl.get(j));
+        Block blk = PBHelperClient.convert(bpl.get(j));
         BlockInfo storedBlock = bm.getStoredBlock(blk);
         if(storedBlock == null) {
           storedBlock = bm.addBlockCollection(
@@ -524,7 +524,7 @@ public class FSImageFormatPBSnapshot {
           .setFileSize(diff.getFileSize());
       if(diff.getBlocks() != null) {
         for(Block block : diff.getBlocks()) {
-          fb.addBlocks(PBHelper.convert(block));
+          fb.addBlocks(PBHelperClient.convert(block));
         }
       }
       INodeFileAttributes copy = diff.snapshotINode;
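Finally, the test updates below all follow one pattern: convert a POJO to its proto form and back, then assert the round trip is lossless. Annotated, using the Block case from the hunks below (the constructor arguments are assumed to be id, length, and generation stamp):

    @Test
    public void testConvertBlock() {
      Block b = new Block(1, 100, 3);                // id, numBytes, genStamp
      BlockProto bProto = PBHelperClient.convert(b); // POJO -> proto
      Block b2 = PBHelperClient.convert(bProto);     // proto -> POJO
      assertEquals(b, b2);                           // nothing lost either way
    }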
http://git-wip-us.apache.org/repos/asf/hadoop/blob/06022b8f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
index 3d2e8b9..851e5b9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
@@ -154,7 +154,7 @@ public class TestPBHelper {
   public void testConvertDatanodeID() {
     DatanodeID dn = DFSTestUtil.getLocalDatanodeID();
     DatanodeIDProto dnProto = PBHelperClient.convert(dn);
-    DatanodeID dn2 = PBHelper.convert(dnProto);
+    DatanodeID dn2 = PBHelperClient.convert(dnProto);
     compare(dn, dn2);
   }
@@ -176,8 +176,8 @@ public class TestPBHelper {
   @Test
   public void testConvertBlock() {
     Block b = new Block(1, 100, 3);
-    BlockProto bProto = PBHelper.convert(b);
-    Block b2 = PBHelper.convert(bProto);
+    BlockProto bProto = PBHelperClient.convert(b);
+    Block b2 = PBHelperClient.convert(bProto);
     assertEquals(b, b2);
   }
@@ -399,7 +399,7 @@ public class TestPBHelper {
         "identifier".getBytes(), "password".getBytes(), new Text("kind"),
         new Text("service"));
     TokenProto tokenProto = PBHelperClient.convert(token);
-    Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
+    Token<BlockTokenIdentifier> token2 = PBHelperClient.convert(tokenProto);
     compare(token, token2);
   }
@@ -489,16 +489,16 @@ public class TestPBHelper {
   @Test
   public void testConvertLocatedBlock() {
     LocatedBlock lb = createLocatedBlock();
-    LocatedBlockProto lbProto = PBHelper.convert(lb);
-    LocatedBlock lb2 = PBHelper.convert(lbProto);
+    LocatedBlockProto lbProto = PBHelperClient.convert(lb);
+    LocatedBlock lb2 = PBHelperClient.convert(lbProto);
     compare(lb,lb2);
   }

   @Test
   public void testConvertLocatedBlockNoStorageMedia() {
     LocatedBlock lb = createLocatedBlockNoStorageMedia();
-    LocatedBlockProto lbProto = PBHelper.convert(lb);
-    LocatedBlock lb2 = PBHelper.convert(lbProto);
+    LocatedBlockProto lbProto = PBHelperClient.convert(lb);
+    LocatedBlock lb2 = PBHelperClient.convert(lbProto);
     compare(lb,lb2);
   }
@@ -508,8 +508,8 @@ public class TestPBHelper {
     for (int i=0;i<3;i++) {
       lbl.add(createLocatedBlock());
     }
-    List<LocatedBlockProto> lbpl = PBHelper.convertLocatedBlock2(lbl);
-    List<LocatedBlock> lbl2 = PBHelper.convertLocatedBlock(lbpl);
+    List<LocatedBlockProto> lbpl = PBHelperClient.convertLocatedBlock2(lbl);
+    List<LocatedBlock> lbl2 = PBHelperClient.convertLocatedBlock(lbpl);
     assertEquals(lbl.size(), lbl2.size());
     for (int i=0;i