Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java Thu Aug 8 04:19:49 2013
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
@@ -99,7 +100,7 @@ public class AggregationClient {
* & propagated to it.
*/
public <R, S, P extends Message, Q extends Message, T extends Message> R max(
- final byte[] tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
+ final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
throws Throwable {
HTable table = null;
try {
@@ -188,7 +189,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message> R min(
- final byte[] tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
+ final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
throws Throwable {
HTable table = null;
try {
@@ -268,7 +269,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message> long rowCount(
- final byte[] tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
+ final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
throws Throwable {
HTable table = null;
try {
@@ -342,7 +343,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message> S sum(
- final byte[] tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
+ final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
throws Throwable {
HTable table = null;
try {
@@ -415,7 +416,7 @@ public class AggregationClient {
* @throws Throwable
*/
private <R, S, P extends Message, Q extends Message, T extends Message> Pair<S, Long> getAvgArgs(
- final byte[] tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
+ final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
throws Throwable {
HTable table = null;
try {
@@ -498,7 +499,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
- double avg(final byte[] tableName,
+ double avg(final TableName tableName,
final ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
Pair<S, Long> p = getAvgArgs(tableName, ci, scan);
return ci.divideForAvg(p.getFirst(), p.getSecond());
@@ -606,7 +607,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
- double std(final byte[] tableName, ColumnInterpreter<R, S, P, Q, T> ci,
+ double std(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
Scan scan) throws Throwable {
HTable table = null;
try {
@@ -719,7 +720,7 @@ public class AggregationClient {
* @throws Throwable
*/
public <R, S, P extends Message, Q extends Message, T extends Message>
- R median(final byte[] tableName, ColumnInterpreter<R, S, P, Q, T> ci,
+ R median(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
Scan scan) throws Throwable {
HTable table = null;
try {
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java Thu Aug 8 04:19:49 2013
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.client.c
import com.google.protobuf.ByteString;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;
import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
@@ -45,7 +47,7 @@ public class SecureBulkLoadClient {
this.table = table;
}
- public String prepareBulkLoad(final byte[] tableName) throws IOException {
+ public String prepareBulkLoad(final TableName tableName) throws IOException {
try {
return
table.coprocessorService(SecureBulkLoadProtos.SecureBulkLoadService.class,
@@ -61,7 +63,7 @@ public class SecureBulkLoadClient {
SecureBulkLoadProtos.PrepareBulkLoadRequest request =
SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder()
- .setTableName(com.google.protobuf.ByteString.copyFrom(tableName)).build();
+ .setTableName(ProtobufUtil.toProtoTableName(tableName)).build();
instance.prepareBulkLoad(controller,
request,
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java Thu Aug 8 04:19:49 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.RegionServerCallable;
import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
@@ -48,13 +49,13 @@ public class RegionCoprocessorRpcChannel
private static Log LOG = LogFactory.getLog(RegionCoprocessorRpcChannel.class);
private final HConnection connection;
- private final byte[] table;
+ private final TableName table;
private final byte[] row;
private byte[] lastRegion;
private RpcRetryingCallerFactory rpcFactory;
- public RegionCoprocessorRpcChannel(HConnection conn, byte[] table, byte[] row) {
+ public RegionCoprocessorRpcChannel(HConnection conn, TableName table, byte[] row) {
this.connection = conn;
this.table = table;
this.row = row;
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java Thu Aug 8 04:19:49 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -47,6 +48,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@@ -1563,7 +1565,7 @@ public final class ProtobufUtil {
* @return the converted Permission
*/
public static Permission toPermission(AccessControlProtos.Permission proto) {
- if (proto.hasTable()) {
+ if (proto.hasTableName()) {
return toTablePermission(proto);
} else {
List<Permission.Action> actions = toPermissionActions(proto.getActionList());
@@ -1582,9 +1584,9 @@ public final class ProtobufUtil {
byte[] qualifier = null;
byte[] family = null;
- byte[] table = null;
+ TableName table = null;
- if (proto.hasTable()) table = proto.getTable().toByteArray();
+ if (proto.hasTableName()) table = ProtobufUtil.toTableName(proto.getTableName());
if (proto.hasFamily()) family = proto.getFamily().toByteArray();
if (proto.hasQualifier()) qualifier = proto.getQualifier().toByteArray();
@@ -1603,7 +1605,7 @@ public final class ProtobufUtil {
if (perm instanceof TablePermission) {
TablePermission tablePerm = (TablePermission)perm;
if (tablePerm.hasTable()) {
- builder.setTable(ByteString.copyFrom(tablePerm.getTable()));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tablePerm.getTable()));
}
if (tablePerm.hasFamily()) {
builder.setFamily(ByteString.copyFrom(tablePerm.getFamily()));
@@ -1692,7 +1694,7 @@ public final class ProtobufUtil {
permissionBuilder.addAction(toPermissionAction(a));
}
if (perm.hasTable()) {
- permissionBuilder.setTable(ByteString.copyFrom(perm.getTable()));
+ permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(perm.getTable()));
}
if (perm.hasFamily()) {
permissionBuilder.setFamily(ByteString.copyFrom(perm.getFamily()));
@@ -1719,9 +1721,9 @@ public final class ProtobufUtil {
byte[] qualifier = null;
byte[] family = null;
- byte[] table = null;
+ TableName table = null;
- if (permission.hasTable()) table = permission.getTable().toByteArray();
+ if (permission.hasTableName()) table = ProtobufUtil.toTableName(permission.getTableName());
if (permission.hasFamily()) family = permission.getFamily().toByteArray();
if (permission.hasQualifier()) qualifier = permission.getQualifier().toByteArray();
@@ -1762,14 +1764,14 @@ public final class ProtobufUtil {
*
* @param protocol the AccessControlService protocol proxy
* @param userShortName the short name of the user to grant permissions
- * @param t optional table name
+ * @param tableName optional table name
* @param f optional column family
* @param q optional qualifier
* @param actions the permissions to be granted
* @throws ServiceException
*/
public static void grant(AccessControlService.BlockingInterface protocol,
- String userShortName, byte[] t, byte[] f, byte[] q,
+ String userShortName, TableName tableName, byte[] f, byte[] q,
Permission.Action... actions) throws ServiceException {
List<AccessControlProtos.Permission.Action> permActions =
Lists.newArrayListWithCapacity(actions.length);
@@ -1777,7 +1779,7 @@ public final class ProtobufUtil {
permActions.add(ProtobufUtil.toPermissionAction(a));
}
AccessControlProtos.GrantRequest request = RequestConverter.
- buildGrantRequest(userShortName, t, f, q, permActions.toArray(
+ buildGrantRequest(userShortName, tableName, f, q, permActions.toArray(
new AccessControlProtos.Permission.Action[actions.length]));
protocol.grant(null, request);
}
@@ -1791,14 +1793,14 @@ public final class ProtobufUtil {
*
* @param protocol the AccessControlService protocol proxy
* @param userShortName the short name of the user to revoke permissions
- * @param t optional table name
+ * @param tableName optional table name
* @param f optional column family
* @param q optional qualifier
* @param actions the permissions to be revoked
* @throws ServiceException
*/
public static void revoke(AccessControlService.BlockingInterface protocol,
- String userShortName, byte[] t, byte[] f, byte[] q,
+ String userShortName, TableName tableName, byte[] f, byte[] q,
Permission.Action... actions) throws ServiceException {
List<AccessControlProtos.Permission.Action> permActions =
Lists.newArrayListWithCapacity(actions.length);
@@ -1806,7 +1808,7 @@ public final class ProtobufUtil {
permActions.add(ProtobufUtil.toPermissionAction(a));
}
AccessControlProtos.RevokeRequest request = RequestConverter.
- buildRevokeRequest(userShortName, t, f, q, permActions.toArray(
+ buildRevokeRequest(userShortName, tableName, f, q, permActions.toArray(
new AccessControlProtos.Permission.Action[actions.length]));
protocol.revoke(null, request);
}
@@ -1822,11 +1824,11 @@ public final class ProtobufUtil {
*/
public static List<UserPermission> getUserPermissions(
AccessControlService.BlockingInterface protocol,
- byte[] t) throws ServiceException {
+ TableName t) throws ServiceException {
AccessControlProtos.UserPermissionsRequest.Builder builder =
AccessControlProtos.UserPermissionsRequest.newBuilder();
if (t != null) {
- builder.setTable(ByteString.copyFrom(t));
+ builder.setTableName(ProtobufUtil.toProtoTableName(t));
}
AccessControlProtos.UserPermissionsRequest request = builder.build();
AccessControlProtos.UserPermissionsResponse response =
@@ -1988,6 +1990,28 @@ public final class ProtobufUtil {
cell.getValue().toByteArray());
}
+ public static HBaseProtos.NamespaceDescriptor toProtoNamespaceDescriptor(NamespaceDescriptor ns) {
+ HBaseProtos.NamespaceDescriptor.Builder b =
+ HBaseProtos.NamespaceDescriptor.newBuilder()
+ .setName(ByteString.copyFromUtf8(ns.getName()));
+ for(Map.Entry<String, String> entry: ns.getConfiguration().entrySet()) {
+ b.addConfiguration(HBaseProtos.NameStringPair.newBuilder()
+ .setName(entry.getKey())
+ .setValue(entry.getValue()));
+ }
+ return b.build();
+ }
+
+ public static NamespaceDescriptor toNamespaceDescriptor(
+ HBaseProtos.NamespaceDescriptor desc) throws IOException {
+ NamespaceDescriptor.Builder b =
+ NamespaceDescriptor.create(desc.getName().toStringUtf8());
+ for(HBaseProtos.NameStringPair prop : desc.getConfigurationList()) {
+ b.addConfiguration(prop.getName(), prop.getValue());
+ }
+ return b.build();
+ }
+
/**
* Get an instance of the argument type declared in a class's signature. The
* argument type is assumed to be a PB Message subclass, and the instance is
@@ -2029,7 +2053,7 @@ public final class ProtobufUtil {
// input / output paths are relative to the store dir
// store dir is relative to region dir
CompactionDescriptor.Builder builder = CompactionDescriptor.newBuilder()
- .setTableName(ByteString.copyFrom(info.getTableName()))
+ .setTableName(ByteString.copyFrom(info.getTableName().getName()))
.setEncodedRegionName(ByteString.copyFrom(info.getEncodedNameAsBytes()))
.setFamilyName(ByteString.copyFrom(family))
.setStoreHomeDir(storeDir.getName()); //make relative
@@ -2077,4 +2101,15 @@ public final class ProtobufUtil {
return "row=" + Bytes.toString(proto.getRow().toByteArray()) +
", type=" + proto.getMutateType().toString();
}
+
+ public static TableName toTableName(HBaseProtos.TableName tableNamePB) {
+ return TableName.valueOf(tableNamePB.getNamespace().toByteArray(),
+ tableNamePB.getQualifier().toByteArray());
+ }
+
+ public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
+ return HBaseProtos.TableName.newBuilder()
+ .setNamespace(ByteString.copyFrom(tableName.getNamespace()))
+ .setQualifier(ByteString.copyFrom(tableName.getQualifier())).build();
+ }
}
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java Thu Aug 8 04:19:49 2013
@@ -22,6 +22,7 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellScannable;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -904,9 +905,9 @@ public final class RequestConverter {
* @return an AddColumnRequest
*/
public static AddColumnRequest buildAddColumnRequest(
- final byte [] tableName, final HColumnDescriptor column) {
+ final TableName tableName, final HColumnDescriptor column) {
AddColumnRequest.Builder builder = AddColumnRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setColumnFamilies(column.convert());
return builder.build();
}
@@ -919,9 +920,9 @@ public final class RequestConverter {
* @return a DeleteColumnRequest
*/
public static DeleteColumnRequest buildDeleteColumnRequest(
- final byte [] tableName, final byte [] columnName) {
+ final TableName tableName, final byte [] columnName) {
DeleteColumnRequest.Builder builder = DeleteColumnRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+    builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setColumnName(ByteString.copyFrom(columnName));
return builder.build();
}
@@ -934,9 +935,9 @@ public final class RequestConverter {
* @return an ModifyColumnRequest
*/
public static ModifyColumnRequest buildModifyColumnRequest(
- final byte [] tableName, final HColumnDescriptor column) {
+ final TableName tableName, final HColumnDescriptor column) {
ModifyColumnRequest.Builder builder = ModifyColumnRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+    builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setColumnFamilies(column.convert());
return builder.build();
}
@@ -1019,9 +1020,9 @@ public final class RequestConverter {
* @param tableName
* @return a DeleteTableRequest
*/
- public static DeleteTableRequest buildDeleteTableRequest(final byte [] tableName) {
+ public static DeleteTableRequest buildDeleteTableRequest(final TableName tableName) {
DeleteTableRequest.Builder builder = DeleteTableRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
return builder.build();
}
@@ -1031,9 +1032,9 @@ public final class RequestConverter {
* @param tableName
* @return an EnableTableRequest
*/
- public static EnableTableRequest buildEnableTableRequest(final byte [] tableName) {
+ public static EnableTableRequest buildEnableTableRequest(final TableName tableName) {
EnableTableRequest.Builder builder = EnableTableRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
return builder.build();
}
@@ -1043,9 +1044,9 @@ public final class RequestConverter {
* @param tableName
* @return a DisableTableRequest
*/
- public static DisableTableRequest buildDisableTableRequest(final byte [] tableName) {
+ public static DisableTableRequest buildDisableTableRequest(final TableName tableName) {
DisableTableRequest.Builder builder = DisableTableRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+    builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
return builder.build();
}
@@ -1077,9 +1078,9 @@ public final class RequestConverter {
* @return a ModifyTableRequest
*/
public static ModifyTableRequest buildModifyTableRequest(
- final byte [] table, final HTableDescriptor hTableDesc) {
+ final TableName tableName, final HTableDescriptor hTableDesc) {
ModifyTableRequest.Builder builder = ModifyTableRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(table));
+    builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setTableSchema(hTableDesc.convert());
return builder.build();
}
@@ -1091,9 +1092,9 @@ public final class RequestConverter {
* @return a GetSchemaAlterStatusRequest
*/
public static GetSchemaAlterStatusRequest buildGetSchemaAlterStatusRequest(
- final byte [] tableName) {
+ final TableName tableName) {
GetSchemaAlterStatusRequest.Builder builder = GetSchemaAlterStatusRequest.newBuilder();
- builder.setTableName(ByteString.copyFrom(tableName));
+    builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
return builder.build();
}
@@ -1104,11 +1105,11 @@ public final class RequestConverter {
* @return a GetTableDescriptorsRequest
*/
public static GetTableDescriptorsRequest buildGetTableDescriptorsRequest(
- final List<String> tableNames) {
+ final List<TableName> tableNames) {
GetTableDescriptorsRequest.Builder builder = GetTableDescriptorsRequest.newBuilder();
if (tableNames != null) {
- for (String str : tableNames) {
- builder.addTableNames(str);
+ for (TableName tableName : tableNames) {
+ builder.addTableNames(ProtobufUtil.toProtoTableName(tableName));
}
}
return builder.build();
@@ -1121,9 +1122,9 @@ public final class RequestConverter {
* @return a GetTableDescriptorsRequest
*/
public static GetTableDescriptorsRequest buildGetTableDescriptorsRequest(
- final byte[] tableName) {
+ final TableName tableName) {
return GetTableDescriptorsRequest.newBuilder()
- .addTableNames(Bytes.toString(tableName))
+ .addTableNames(ProtobufUtil.toProtoTableName(tableName))
.build();
}
@@ -1204,22 +1205,22 @@ public final class RequestConverter {
* Create a request to grant user permissions.
*
* @param username the short user name who to grant permissions
- * @param table optional table name the permissions apply
+ * @param tableName optional table name the permissions apply
* @param family optional column family
* @param qualifier optional qualifier
* @param actions the permissions to be granted
* @return A {@link AccessControlProtos} GrantRequest
*/
public static AccessControlProtos.GrantRequest buildGrantRequest(
- String username, byte[] table, byte[] family, byte[] qualifier,
+ String username, TableName tableName, byte[] family, byte[] qualifier,
AccessControlProtos.Permission.Action... actions) {
AccessControlProtos.Permission.Builder permissionBuilder =
AccessControlProtos.Permission.newBuilder();
for (AccessControlProtos.Permission.Action a : actions) {
permissionBuilder.addAction(a);
}
- if (table != null) {
- permissionBuilder.setTable(ByteString.copyFrom(table));
+ if (tableName != null) {
+ permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
}
if (family != null) {
permissionBuilder.setFamily(ByteString.copyFrom(family));
@@ -1240,22 +1241,22 @@ public final class RequestConverter {
* Create a request to revoke user permissions.
*
* @param username the short user name whose permissions to be revoked
- * @param table optional table name the permissions apply
+ * @param tableName optional table name the permissions apply
* @param family optional column family
* @param qualifier optional qualifier
* @param actions the permissions to be revoked
* @return A {@link AccessControlProtos} RevokeRequest
*/
public static AccessControlProtos.RevokeRequest buildRevokeRequest(
- String username, byte[] table, byte[] family, byte[] qualifier,
+ String username, TableName tableName, byte[] family, byte[] qualifier,
AccessControlProtos.Permission.Action... actions) {
AccessControlProtos.Permission.Builder permissionBuilder =
AccessControlProtos.Permission.newBuilder();
for (AccessControlProtos.Permission.Action a : actions) {
permissionBuilder.addAction(a);
}
- if (table != null) {
- permissionBuilder.setTable(ByteString.copyFrom(table));
+ if (tableName != null) {
+ permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
}
if (family != null) {
permissionBuilder.setFamily(ByteString.copyFrom(family));
@@ -1290,4 +1291,4 @@ public final class RequestConverter {
}
return builder.build();
}
-}
\ No newline at end of file
+}
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java Thu Aug 8 04:19:49 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
@@ -36,7 +37,7 @@ import java.io.IOException;
public class TablePermission extends Permission {
private static Log LOG = LogFactory.getLog(TablePermission.class);
- private byte[] table;
+ private TableName table;
private byte[] family;
private byte[] qualifier;
@@ -52,7 +53,7 @@ public class TablePermission extends Per
* @param family the family, can be null if a global permission on the table
* @param assigned the list of allowed actions
*/
- public TablePermission(byte[] table, byte[] family, Action... assigned) {
+ public TablePermission(TableName table, byte[] family, Action... assigned) {
this(table, family, null, assigned);
}
@@ -63,7 +64,7 @@ public class TablePermission extends Per
* @param family the family, can be null if a global permission on the table
* @param assigned the list of allowed actions
*/
- public TablePermission(byte[] table, byte[] family, byte[] qualifier,
+ public TablePermission(TableName table, byte[] family, byte[] qualifier,
Action... assigned) {
super(assigned);
this.table = table;
@@ -78,7 +79,7 @@ public class TablePermission extends Per
* @param family the family, can be null if a global permission on the table
* @param actionCodes the list of allowed action codes
*/
- public TablePermission(byte[] table, byte[] family, byte[] qualifier,
+ public TablePermission(TableName table, byte[] family, byte[] qualifier,
byte[] actionCodes) {
super(actionCodes);
this.table = table;
@@ -90,7 +91,7 @@ public class TablePermission extends Per
return table != null;
}
- public byte[] getTable() {
+ public TableName getTable() {
return table;
}
@@ -123,9 +124,9 @@ public class TablePermission extends Per
* @return <code>true</code> if the action within the given scope is allowed
* by this permission, <code>false</code>
*/
- public boolean implies(byte[] table, byte[] family, byte[] qualifier,
+ public boolean implies(TableName table, byte[] family, byte[] qualifier,
Action action) {
- if (!Bytes.equals(this.table, table)) {
+    if (this.table == null ? table != null : !this.table.equals(table)) {
return false;
}
@@ -154,8 +155,8 @@ public class TablePermission extends Per
* @return <code>true</code> if the action is allowed over the given scope
* by this permission, otherwise <code>false</code>
*/
- public boolean implies(byte[] table, KeyValue kv, Action action) {
- if (!Bytes.equals(this.table, table)) {
+ public boolean implies(TableName table, KeyValue kv, Action action) {
+    if (this.table == null ? table != null : !this.table.equals(table)) {
return false;
}
@@ -183,8 +184,8 @@ public class TablePermission extends Per
* column-qualifier specific permission, for example, implies() would still
* return false.
*/
- public boolean matchesFamily(byte[] table, byte[] family, Action action) {
- if (!Bytes.equals(this.table, table)) {
+ public boolean matchesFamily(TableName table, byte[] family, Action action) {
+    if (this.table == null ? table != null : !this.table.equals(table)) {
return false;
}
@@ -208,7 +209,7 @@ public class TablePermission extends Per
* @return <code>true</code> if the table, family and qualifier match,
* otherwise <code>false</code>
*/
- public boolean matchesFamilyQualifier(byte[] table, byte[] family, byte[] qualifier,
+ public boolean matchesFamilyQualifier(TableName table, byte[] family, byte[] qualifier,
Action action) {
if (!matchesFamily(table, family, action)) {
return false;
@@ -229,7 +230,7 @@ public class TablePermission extends Per
}
TablePermission other = (TablePermission)obj;
- if (!(Bytes.equals(table, other.getTable()) &&
+    if (!((table == null ? other.getTable() == null : table.equals(other.getTable())) &&
((family == null && other.getFamily() == null) ||
Bytes.equals(family, other.getFamily())) &&
((qualifier == null && other.getQualifier() == null) ||
@@ -247,7 +248,7 @@ public class TablePermission extends Per
final int prime = 37;
int result = super.hashCode();
if (table != null) {
- result = prime * result + Bytes.hashCode(table);
+ result = prime * result + table.hashCode();
}
if (family != null) {
result = prime * result + Bytes.hashCode(family);
@@ -260,7 +261,7 @@ public class TablePermission extends Per
public String toString() {
StringBuilder str = new StringBuilder("[TablePermission: ")
- .append("table=").append(Bytes.toString(table))
+ .append("table=").append(table)
.append(", family=").append(Bytes.toString(family))
.append(", qualifier=").append(Bytes.toString(qualifier))
.append(", actions=");
@@ -282,7 +283,8 @@ public class TablePermission extends Per
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
- table = Bytes.readByteArray(in);
+ byte[] tableBytes = Bytes.readByteArray(in);
+ table = TableName.valueOf(tableBytes);
if (in.readBoolean()) {
family = Bytes.readByteArray(in);
}
@@ -294,7 +296,7 @@ public class TablePermission extends Per
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
- Bytes.writeByteArray(out, table);
+ Bytes.writeByteArray(out, table.getName());
out.writeBoolean(family != null);
if (family != null) {
Bytes.writeByteArray(out, family);
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java Thu Aug 8 04:19:49 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.DataInput;
@@ -69,7 +70,7 @@ public class UserPermission extends Tabl
* table
* @param assigned the list of allowed actions
*/
- public UserPermission(byte[] user, byte[] table, byte[] family,
+ public UserPermission(byte[] user, TableName table, byte[] family,
Action... assigned) {
super(table, family, assigned);
this.user = user;
@@ -86,7 +87,7 @@ public class UserPermission extends Tabl
* over the entire column family
* @param assigned the list of allowed actions
*/
- public UserPermission(byte[] user, byte[] table, byte[] family,
+ public UserPermission(byte[] user, TableName table, byte[] family,
byte[] qualifier, Action... assigned) {
super(table, family, qualifier, assigned);
this.user = user;
@@ -103,7 +104,7 @@ public class UserPermission extends Tabl
* over the entire column family
* @param actionCodes the list of allowed action codes
*/
- public UserPermission(byte[] user, byte[] table, byte[] family,
+ public UserPermission(byte[] user, TableName table, byte[] family,
byte[] qualifier, byte[] actionCodes) {
super(table, family, qualifier, actionCodes);
this.user = user;
@@ -117,8 +118,8 @@ public class UserPermission extends Tabl
* Returns true if this permission describes a global user permission.
*/
public boolean isGlobal() {
- byte[] tableName = getTable();
- return(tableName == null || tableName.length == 0);
+ TableName tableName = getTable();
+ return(tableName == null);
}
@Override
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java Thu Aug 8 04:19:49 2013
@@ -20,7 +20,9 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -38,15 +40,16 @@ public class ClientSnapshotDescriptionUt
*/
public static void assertSnapshotRequestIsValid(HBaseProtos.SnapshotDescription snapshot)
throws IllegalArgumentException {
- // FIXME these method names is really bad - trunk will probably change
- // .META. and -ROOT- snapshots are not allowed
- if (HTableDescriptor.isMetaTable(Bytes.toBytes(snapshot.getTable()))) {
- throw new IllegalArgumentException(".META. and -ROOT- snapshots are not allowed");
- }
// make sure the snapshot name is valid
- HTableDescriptor.isLegalTableName(Bytes.toBytes(snapshot.getName()));
- // make sure the table name is valid
- HTableDescriptor.isLegalTableName(Bytes.toBytes(snapshot.getTable()));
+ TableName.isLegalTableQualifierName(Bytes.toBytes(snapshot.getName()));
+ if(snapshot.hasTable()) {
+ // make sure the table name is valid; TableName.valueOf implicitly validates it
+ TableName tableName = TableName.valueOf(snapshot.getTable());
+
+ if (HTableDescriptor.isSystemTable(tableName)) {
+ throw new IllegalArgumentException("System table snapshots are not allowed");
+ }
+ }
}
/**
@@ -60,7 +63,8 @@ public class ClientSnapshotDescriptionUt
if (ssd == null) {
return null;
}
- return "{ ss=" + ssd.getName() + " table=" + ssd.getTable()
- + " type=" + ssd.getType() + " }";
+ return "{ ss=" + ssd.getName() +
+ " table=" + (ssd.hasTable()?TableName.valueOf(ssd.getTable()):"") +
+ " type=" + ssd.getType() + " }";
}
}
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java Thu Aug 8 04:19:49 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.snapshot
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
@@ -45,7 +46,14 @@ public class TablePartiallyOpenException
/**
* @param tableName Name of table that is partial open
*/
- public TablePartiallyOpenException(byte[] tableName) {
- this(Bytes.toString(tableName));
+ public TablePartiallyOpenException(TableName tableName) {
+ this(tableName.getNameAsString());
}
+
+ /**
+ * @param tableName Name of table that is partially open
+ */
+ public TablePartiallyOpenException(byte[] tableName) {
+ this(Bytes.toString(tableName));
+ }
}
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java Thu Aug 8 04:19:49 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.zookeepe
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
@@ -56,8 +57,8 @@ public class ZKTable {
* for every query. Synchronize access rather than use concurrent Map because
* synchronization needs to span query of zk.
*/
- private final Map<String, ZooKeeperProtos.Table.State> cache =
- new HashMap<String, ZooKeeperProtos.Table.State>();
+ private final Map<TableName, ZooKeeperProtos.Table.State> cache =
+ new HashMap<TableName, ZooKeeperProtos.Table.State>();
// TODO: Make it so always a table znode. Put table schema here as well as table state.
// Have watcher on table znode so all are notified of state or schema change.
@@ -78,8 +79,9 @@ public class ZKTable {
List<String> children = ZKUtil.listChildrenNoWatch(this.watcher, this.watcher.tableZNode);
if (children == null) return;
for (String child: children) {
- ZooKeeperProtos.Table.State state = ZKTableReadOnly.getTableState(this.watcher, child);
- if (state != null) this.cache.put(child, state);
+ TableName tableName = TableName.valueOf(child);
+ ZooKeeperProtos.Table.State state = ZKTableReadOnly.getTableState(this.watcher, tableName);
+ if (state != null) this.cache.put(tableName, state);
}
}
}
@@ -90,7 +92,7 @@ public class ZKTable {
* @param tableName
* @throws KeeperException unexpected zookeeper exception
*/
- public void setDisabledTable(String tableName)
+ public void setDisabledTable(TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (!isDisablingOrDisabledTable(tableName)) {
@@ -107,7 +109,7 @@ public class ZKTable {
* @param tableName
* @throws KeeperException unexpected zookeeper exception
*/
- public void setDisablingTable(final String tableName)
+ public void setDisablingTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (!isEnabledOrDisablingTable(tableName)) {
@@ -124,7 +126,7 @@ public class ZKTable {
* @param tableName
* @throws KeeperException unexpected zookeeper exception
*/
- public void setEnablingTable(final String tableName)
+ public void setEnablingTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (!isDisabledOrEnablingTable(tableName)) {
@@ -142,7 +144,7 @@ public class ZKTable {
* @return if the operation succeeds or not
* @throws KeeperException unexpected zookeeper exception
*/
- public boolean checkAndSetEnablingTable(final String tableName)
+ public boolean checkAndSetEnablingTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (isEnablingTable(tableName)) {
@@ -160,7 +162,7 @@ public class ZKTable {
* @return if the operation succeeds or not
* @throws KeeperException unexpected zookeeper exception
*/
- public boolean checkDisabledAndSetEnablingTable(final String tableName)
+ public boolean checkDisabledAndSetEnablingTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (!isDisabledTable(tableName)) {
@@ -178,7 +180,7 @@ public class ZKTable {
* @return if the operation succeeds or not
* @throws KeeperException unexpected zookeeper exception
*/
- public boolean checkEnabledAndSetDisablingTable(final String tableName)
+ public boolean checkEnabledAndSetDisablingTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (this.cache.get(tableName) != null && !isEnabledTable(tableName)) {
@@ -189,9 +191,9 @@ public class ZKTable {
}
}
- private void setTableState(final String tableName, final ZooKeeperProtos.Table.State state)
+ private void setTableState(final TableName tableName, final ZooKeeperProtos.Table.State state)
throws KeeperException {
- String znode = ZKUtil.joinZNode(this.watcher.tableZNode, tableName);
+ String znode = ZKUtil.joinZNode(this.watcher.tableZNode, tableName.getNameAsString());
if (ZKUtil.checkExists(this.watcher, znode) == -1) {
ZKUtil.createAndFailSilent(this.watcher, znode);
}
@@ -204,41 +206,41 @@ public class ZKTable {
}
}
- public boolean isDisabledTable(final String tableName) {
+ public boolean isDisabledTable(final TableName tableName) {
return isTableState(tableName, ZooKeeperProtos.Table.State.DISABLED);
}
- public boolean isDisablingTable(final String tableName) {
+ public boolean isDisablingTable(final TableName tableName) {
return isTableState(tableName, ZooKeeperProtos.Table.State.DISABLING);
}
- public boolean isEnablingTable(final String tableName) {
+ public boolean isEnablingTable(final TableName tableName) {
return isTableState(tableName, ZooKeeperProtos.Table.State.ENABLING);
}
- public boolean isEnabledTable(String tableName) {
+ public boolean isEnabledTable(TableName tableName) {
return isTableState(tableName, ZooKeeperProtos.Table.State.ENABLED);
}
- public boolean isDisablingOrDisabledTable(final String tableName) {
+ public boolean isDisablingOrDisabledTable(final TableName tableName) {
synchronized (this.cache) {
return isDisablingTable(tableName) || isDisabledTable(tableName);
}
}
- public boolean isEnabledOrDisablingTable(final String tableName) {
+ public boolean isEnabledOrDisablingTable(final TableName tableName) {
synchronized (this.cache) {
return isEnabledTable(tableName) || isDisablingTable(tableName);
}
}
- public boolean isDisabledOrEnablingTable(final String tableName) {
+ public boolean isDisabledOrEnablingTable(final TableName tableName) {
synchronized (this.cache) {
return isDisabledTable(tableName) || isEnablingTable(tableName);
}
}
- private boolean isTableState(final String tableName, final ZooKeeperProtos.Table.State state) {
+ private boolean isTableState(final TableName tableName, final ZooKeeperProtos.Table.State state) {
synchronized (this.cache) {
ZooKeeperProtos.Table.State currentState = this.cache.get(tableName);
return ZKTableReadOnly.isTableState(currentState, state);
@@ -251,7 +253,7 @@ public class ZKTable {
* @param tableName
* @throws KeeperException unexpected zookeeper exception
*/
- public void setDeletedTable(final String tableName)
+ public void setDeletedTable(final TableName tableName)
throws KeeperException {
synchronized (this.cache) {
if (this.cache.remove(tableName) == null) {
@@ -259,7 +261,7 @@ public class ZKTable {
"already deleted");
}
ZKUtil.deleteNodeFailSilent(this.watcher,
- ZKUtil.joinZNode(this.watcher.tableZNode, tableName));
+ ZKUtil.joinZNode(this.watcher.tableZNode, tableName.getNameAsString()));
}
}
@@ -270,7 +272,7 @@ public class ZKTable {
* @param tableName
* @throws KeeperException
*/
- public void setEnabledTable(final String tableName) throws KeeperException {
+ public void setEnabledTable(final TableName tableName) throws KeeperException {
setTableState(tableName, ZooKeeperProtos.Table.State.ENABLED);
}
@@ -280,7 +282,7 @@ public class ZKTable {
* @param tableName
* @return true if the table is present
*/
- public boolean isTablePresent(final String tableName) {
+ public boolean isTablePresent(final TableName tableName) {
synchronized (this.cache) {
ZooKeeperProtos.Table.State state = this.cache.get(tableName);
return !(state == null);
@@ -291,11 +293,11 @@ public class ZKTable {
* Gets a list of all the tables set as disabled in zookeeper.
* @return Set of disabled tables, empty Set if none
*/
- public Set<String> getDisabledTables() {
- Set<String> disabledTables = new HashSet<String>();
+ public Set<TableName> getDisabledTables() {
+ Set<TableName> disabledTables = new HashSet<TableName>();
synchronized (this.cache) {
- Set<String> tables = this.cache.keySet();
- for (String table: tables) {
+ Set<TableName> tables = this.cache.keySet();
+ for (TableName table: tables) {
if (isDisabledTable(table)) disabledTables.add(table);
}
}
@@ -307,7 +309,7 @@ public class ZKTable {
* @return Set of disabled tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getDisabledTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getDisabledTables(ZooKeeperWatcher zkw)
throws KeeperException {
return getAllTables(zkw, ZooKeeperProtos.Table.State.DISABLED);
}
@@ -317,7 +319,7 @@ public class ZKTable {
* @return Set of disabling tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getDisablingTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getDisablingTables(ZooKeeperWatcher zkw)
throws KeeperException {
return getAllTables(zkw, ZooKeeperProtos.Table.State.DISABLING);
}
@@ -327,7 +329,7 @@ public class ZKTable {
* @return Set of enabling tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getEnablingTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getEnablingTables(ZooKeeperWatcher zkw)
throws KeeperException {
return getAllTables(zkw, ZooKeeperProtos.Table.State.ENABLING);
}
@@ -337,7 +339,7 @@ public class ZKTable {
* @return Set of disabled tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getDisabledOrDisablingTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getDisabledOrDisablingTables(ZooKeeperWatcher zkw)
throws KeeperException {
return getAllTables(zkw, ZooKeeperProtos.Table.State.DISABLED,
ZooKeeperProtos.Table.State.DISABLING);
@@ -352,14 +354,14 @@ public class ZKTable {
* @param deleteZNode
* @throws KeeperException
*/
- public void removeEnablingTable(final String tableName, boolean deleteZNode)
+ public void removeEnablingTable(final TableName tableName, boolean deleteZNode)
throws KeeperException {
synchronized (this.cache) {
if (isEnablingTable(tableName)) {
this.cache.remove(tableName);
if (deleteZNode) {
ZKUtil.deleteNodeFailSilent(this.watcher,
- ZKUtil.joinZNode(this.watcher.tableZNode, tableName));
+ ZKUtil.joinZNode(this.watcher.tableZNode, tableName.getNameAsString()));
}
}
}
@@ -371,17 +373,18 @@ public class ZKTable {
* @return Set of tables of specified states, empty Set if none
* @throws KeeperException
*/
- static Set<String> getAllTables(final ZooKeeperWatcher zkw,
+ static Set<TableName> getAllTables(final ZooKeeperWatcher zkw,
final ZooKeeperProtos.Table.State... states) throws KeeperException {
- Set<String> allTables = new HashSet<String>();
+ Set<TableName> allTables = new HashSet<TableName>();
List<String> children =
ZKUtil.listChildrenNoWatch(zkw, zkw.tableZNode);
if(children == null) return allTables;
for (String child: children) {
- ZooKeeperProtos.Table.State state = ZKTableReadOnly.getTableState(zkw, child);
+ TableName tableName = TableName.valueOf(child);
+ ZooKeeperProtos.Table.State state = ZKTableReadOnly.getTableState(zkw, tableName);
for (ZooKeeperProtos.Table.State expectedState: states) {
if (state == expectedState) {
- allTables.add(child);
+ allTables.add(tableName);
break;
}
}
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java Thu Aug 8 04:19:49 2013
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.zookeeper;
import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
@@ -50,7 +51,7 @@ public class ZKTableReadOnly {
* @throws KeeperException
*/
public static boolean isDisabledTable(final ZooKeeperWatcher zkw,
- final String tableName)
+ final TableName tableName)
throws KeeperException {
ZooKeeperProtos.Table.State state = getTableState(zkw, tableName);
return isTableState(ZooKeeperProtos.Table.State.DISABLED, state);
@@ -66,7 +67,7 @@ public class ZKTableReadOnly {
* @throws KeeperException
*/
public static boolean isEnabledTable(final ZooKeeperWatcher zkw,
- final String tableName)
+ final TableName tableName)
throws KeeperException {
return getTableState(zkw, tableName) == ZooKeeperProtos.Table.State.ENABLED;
}
@@ -82,7 +83,7 @@ public class ZKTableReadOnly {
* @throws KeeperException
*/
public static boolean isDisablingOrDisabledTable(final ZooKeeperWatcher zkw,
- final String tableName)
+ final TableName tableName)
throws KeeperException {
ZooKeeperProtos.Table.State state = getTableState(zkw, tableName);
return isTableState(ZooKeeperProtos.Table.State.DISABLING, state) ||
@@ -94,14 +95,16 @@ public class ZKTableReadOnly {
* @return Set of disabled tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getDisabledTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getDisabledTables(ZooKeeperWatcher zkw)
throws KeeperException {
- Set<String> disabledTables = new HashSet<String>();
+ Set<TableName> disabledTables = new HashSet<TableName>();
List<String> children =
ZKUtil.listChildrenNoWatch(zkw, zkw.tableZNode);
for (String child: children) {
- ZooKeeperProtos.Table.State state = getTableState(zkw, child);
- if (state == ZooKeeperProtos.Table.State.DISABLED) disabledTables.add(child);
+ TableName tableName =
+ TableName.valueOf(child);
+ ZooKeeperProtos.Table.State state = getTableState(zkw, tableName);
+ if (state == ZooKeeperProtos.Table.State.DISABLED) disabledTables.add(tableName);
}
return disabledTables;
}
@@ -111,16 +114,18 @@ public class ZKTableReadOnly {
* @return Set of disabled tables, empty Set if none
* @throws KeeperException
*/
- public static Set<String> getDisabledOrDisablingTables(ZooKeeperWatcher zkw)
+ public static Set<TableName> getDisabledOrDisablingTables(ZooKeeperWatcher zkw)
throws KeeperException {
- Set<String> disabledTables = new HashSet<String>();
+ Set<TableName> disabledTables = new HashSet<TableName>();
List<String> children =
ZKUtil.listChildrenNoWatch(zkw, zkw.tableZNode);
for (String child: children) {
- ZooKeeperProtos.Table.State state = getTableState(zkw, child);
+ TableName tableName =
+ TableName.valueOf(child);
+ ZooKeeperProtos.Table.State state = getTableState(zkw, tableName);
if (state == ZooKeeperProtos.Table.State.DISABLED ||
state == ZooKeeperProtos.Table.State.DISABLING)
- disabledTables.add(child);
+ disabledTables.add(tableName);
}
return disabledTables;
}
@@ -132,14 +137,14 @@ public class ZKTableReadOnly {
/**
* @param zkw
- * @param child
+ * @param tableName
* @return Null or {@link ZooKeeperProtos.Table.State} found in znode.
* @throws KeeperException
*/
static ZooKeeperProtos.Table.State getTableState(final ZooKeeperWatcher zkw,
- final String child)
+ final TableName tableName)
throws KeeperException {
- String znode = ZKUtil.joinZNode(zkw.tableZNode, child);
+ String znode = ZKUtil.joinZNode(zkw.tableZNode, tableName.getNameAsString());
byte [] data = ZKUtil.getData(zkw, znode);
if (data == null || data.length <= 0) return null;
try {
Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java Thu Aug 8 04:19:49 2013
@@ -109,6 +109,9 @@ public class ZooKeeperWatcher implements
public String tableLockZNode;
// znode containing the state of recovering regions
public String recoveringRegionsZNode;
+ // znode containing namespace descriptors
+ public static String namespaceZNode = "namespace";
+
// Certain ZooKeeper nodes need to be world-readable
public static final ArrayList<ACL> CREATOR_ALL_AND_WORLD_READABLE =
@@ -231,7 +234,9 @@ public class ZooKeeperWatcher implements
tableLockZNode = ZKUtil.joinZNode(baseZNode,
conf.get("zookeeper.znode.tableLock", "table-lock"));
recoveringRegionsZNode = ZKUtil.joinZNode(baseZNode,
- conf.get("zookeeper.znode.recovering.regions", "recovering-regions"));
+ conf.get("zookeeper.znode.recovering.regions", "recovering-regions"));
+ namespaceZNode = ZKUtil.joinZNode(baseZNode,
+ conf.get("zookeeper.znode.namespace", "namespace"));
}
/**
Modified: hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java (original)
+++ hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java Thu Aug 8 04:19:49 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -47,16 +48,17 @@ import java.util.concurrent.atomic.Atomi
@Category(MediumTests.class)
public class TestAsyncProcess {
- private static final byte[] DUMMY_TABLE = "DUMMY_TABLE".getBytes();
+ private static final TableName DUMMY_TABLE =
+ TableName.valueOf("DUMMY_TABLE");
private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes();
private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes();
private static final byte[] FAILS = "FAILS".getBytes();
private static final Configuration conf = new Configuration();
-
private static ServerName sn = new ServerName("localhost:10,1254");
- private static HRegionInfo hri1 = new HRegionInfo(DUMMY_BYTES_1);
- private static HRegionInfo hri2 = new HRegionInfo(DUMMY_BYTES_1);
+ private static HRegionInfo hri1 = new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_1, DUMMY_BYTES_2);
+ private static HRegionInfo hri2 =
+ new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_2, HConstants.EMPTY_END_ROW);
private static HRegionLocation loc1 = new HRegionLocation(hri1, sn);
private static HRegionLocation loc2 = new HRegionLocation(hri2, sn);
@@ -118,7 +120,8 @@ public class TestAsyncProcess {
}
@Override
- protected <R> AsyncProcess createAsyncProcess(byte[] tableName, ExecutorService pool,
+ protected <R> AsyncProcess createAsyncProcess(TableName tableName,
+ ExecutorService pool,
AsyncProcess.AsyncProcessCallback<R> callback,
Configuration conf) {
ap = new MyAsyncProcess<R>(this, callback, conf);
@@ -126,7 +129,7 @@ public class TestAsyncProcess {
}
@Override
- public HRegionLocation locateRegion(final byte[] tableName,
+ public HRegionLocation locateRegion(final TableName tableName,
final byte[] row) {
return loc1;
}
Modified: hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java (original)
+++ hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java Thu Aug 8 04:19:49 2013
@@ -26,6 +26,7 @@ import java.util.concurrent.ExecutorServ
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -82,7 +83,7 @@ public class TestClientNoCluster {
}
@Override
- public boolean isTableOnlineState(byte[] tableName, boolean enabled)
+ public boolean isTableOnlineState(TableName tableName, boolean enabled)
throws IOException {
return enabled;
}
@@ -103,7 +104,7 @@ public class TestClientNoCluster {
Configuration localConfig = HBaseConfiguration.create(this.conf);
// This override mocks up our exists/get call to throw a RegionServerStoppedException.
localConfig.set("hbase.client.connection.impl", RpcTimeoutConnection.class.getName());
- HTable table = new HTable(localConfig, HConstants.META_TABLE_NAME);
+ HTable table = new HTable(localConfig, TableName.META_TABLE_NAME);
Throwable t = null;
LOG.info("Start");
try {
@@ -140,7 +141,7 @@ public class TestClientNoCluster {
// and it has expired. Otherwise, if this functionality is broke, all retries will be run --
// all ten of them -- and we'll get the RetriesExhaustedException exception.
localConfig.setInt(HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, pause - 1);
- HTable table = new HTable(localConfig, HConstants.META_TABLE_NAME);
+ HTable table = new HTable(localConfig, TableName.META_TABLE_NAME);
Throwable t = null;
try {
// An exists call turns into a get w/ a flag.
@@ -172,7 +173,7 @@ public class TestClientNoCluster {
// Go against meta else we will try to find first region for the table on construction which
// means we'll have to do a bunch more mocking. Tests that go against meta only should be
// good for a bit of testing.
- HTable table = new HTable(this.conf, HConstants.META_TABLE_NAME);
+ HTable table = new HTable(this.conf, TableName.META_TABLE_NAME);
ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY);
try {
Result result = null;
@@ -192,7 +193,7 @@ public class TestClientNoCluster {
// Go against meta else we will try to find first region for the table on construction which
// means we'll have to do a bunch more mocking. Tests that go against meta only should be
// good for a bit of testing.
- HTable table = new HTable(this.conf, HConstants.META_TABLE_NAME);
+ HTable table = new HTable(this.conf, TableName.META_TABLE_NAME);
ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY);
try {
Result result = null;
@@ -295,4 +296,4 @@ public class TestClientNoCluster {
return this.stub;
}
}
-}
\ No newline at end of file
+}
Modified: hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java (original)
+++ hbase/trunk/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java Thu Aug 8 04:19:49 2013
@@ -22,12 +22,16 @@ import static org.junit.Assert.fail;
import java.io.IOException;
+import com.google.protobuf.ByteString;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
@@ -100,7 +104,7 @@ public class TestSnapshotFromAdmin {
// setup the admin and run the test
HBaseAdmin admin = new HBaseAdmin(mockConnection);
String snapshot = "snapshot";
- String table = "table";
+ TableName table = TableName.valueOf("table");
// get start time
long start = System.currentTimeMillis();
admin.snapshot(snapshot, table);
@@ -128,6 +132,7 @@ public class TestSnapshotFromAdmin {
failSnapshotStart(admin, builder.setName("-snapshot").build());
failSnapshotStart(admin, builder.setName("snapshot fails").build());
failSnapshotStart(admin, builder.setName("snap$hot").build());
+ failSnapshotStart(admin, builder.setName("snap:hot").build());
// check the table name also get verified
failSnapshotStart(admin, builder.setName("snapshot").setTable(".table").build());
failSnapshotStart(admin, builder.setName("snapshot").setTable("-table").build());
@@ -144,7 +149,7 @@ public class TestSnapshotFromAdmin {
IsSnapshotDoneResponse doneResponse = IsSnapshotDoneResponse.newBuilder().setDone(true).build();
Mockito.when(
master.isSnapshotDone((RpcController) Mockito.isNull(),
- Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(doneResponse);
+ Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(doneResponse);
// make sure that we can use valid names
admin.snapshot(builder.setName("snapshot").setTable("table").build());
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java Thu Aug 8 04:19:49 2013
@@ -38,9 +38,11 @@ import org.apache.hadoop.hbase.util.Byte
@InterfaceAudience.Public
@InterfaceStability.Stable
public final class HConstants {
+ //Bytes.UTF8_ENCODING should be updated if this changed
/** When we encode strings, we always specify UTF8 encoding */
public static final String UTF8_ENCODING = "UTF-8";
+ //Bytes.UTF8_CHARSET should be updated if this changed
/** When we encode strings, we always specify UTF8 encoding */
public static final Charset UTF8_CHARSET = Charset.forName(UTF8_ENCODING);
/**
@@ -104,9 +106,10 @@ public final class HConstants {
* Version 5 changes versions in catalog table regions.
* Version 6 enables blockcaching on catalog tables.
* Version 7 introduces hfile -- hbase 0.19 to 0.20..
+ * Version 8 introduces namespace
*/
// public static final String FILE_SYSTEM_VERSION = "6";
- public static final String FILE_SYSTEM_VERSION = "7";
+ public static final String FILE_SYSTEM_VERSION = "8";
// Configuration parameters
@@ -349,11 +352,7 @@ public final class HConstants {
// be the first to be reassigned if the server(s) they are being served by
// should go down.
- /** The root table's name.*/
- public static final byte [] ROOT_TABLE_NAME = Bytes.toBytes("-ROOT-");
-
- /** The META table's name. */
- public static final byte [] META_TABLE_NAME = Bytes.toBytes(".META.");
+ public static final String BASE_NAMESPACE_DIR = ".data";
/** delimiter used between portions of a region name */
public static final int META_ROW_DELIMITER = ',';
@@ -826,12 +825,12 @@ public final class HConstants {
Collections.unmodifiableList(Arrays.asList(new String[] { HREGION_LOGDIR_NAME,
HREGION_OLDLOGDIR_NAME, CORRUPT_DIR_NAME, SPLIT_LOGDIR_NAME,
HBCK_SIDELINEDIR_NAME, HFILE_ARCHIVE_DIRECTORY, SNAPSHOT_DIR_NAME, HBASE_TEMP_DIRECTORY,
- OLD_SNAPSHOT_DIR_NAME }));
+ OLD_SNAPSHOT_DIR_NAME, BASE_NAMESPACE_DIR}));
/** Directories that are not HBase user table directories */
public static final List<String> HBASE_NON_USER_TABLE_DIRS =
Collections.unmodifiableList(Arrays.asList((String[])ArrayUtils.addAll(
- new String[] { Bytes.toString(META_TABLE_NAME), Bytes.toString(ROOT_TABLE_NAME) },
+ new String[] { TableName.META_TABLE_NAME.getNameAsString(), TableName.ROOT_TABLE_NAME.getNameAsString() },
HBASE_NON_TABLE_DIRS.toArray())));
/** Health script related settings. */
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1511577&r1=1511576&r2=1511577&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Thu Aug 8 04:19:49 2013
@@ -68,6 +68,10 @@ import com.google.common.primitives.Long
@InterfaceStability.Evolving
public class KeyValue implements Cell, HeapSize, Cloneable {
static final Log LOG = LogFactory.getLog(KeyValue.class);
+
+ private static final int META_LENGTH =
+ TableName.META_TABLE_NAME.getName().length; // 'hbase.meta' length
+
// TODO: Group Key-only comparators and operations into a Key class, just
// for neatness sake, if can figure what to call it.
@@ -123,11 +127,11 @@ public class KeyValue implements Cell, H
* @param tableName The table name.
* @return The comparator.
*/
- public static KeyComparator getRowComparator(byte [] tableName) {
- if(Bytes.equals(HConstants.ROOT_TABLE_NAME,tableName)) {
+ public static KeyComparator getRowComparator(TableName tableName) {
+ if(TableName.ROOT_TABLE_NAME.equals(tableName)) {
return ROOT_COMPARATOR.getRawComparator();
}
- if(Bytes.equals(HConstants.META_TABLE_NAME, tableName)) {
+ if(TableName.META_TABLE_NAME.equals(tableName)) {
return META_COMPARATOR.getRawComparator();
}
return COMPARATOR.getRawComparator();
@@ -2399,14 +2403,13 @@ public class KeyValue implements Cell, H
// Rows look like this: .META.,ROW_FROM_META,RID
// LOG.info("ROOT " + Bytes.toString(left, loffset, llength) +
// "---" + Bytes.toString(right, roffset, rlength));
- final int metalength = 7; // '.META.' length
- int lmetaOffsetPlusDelimiter = loffset + metalength;
+ int lmetaOffsetPlusDelimiter = loffset + META_LENGTH + 1;
int leftFarDelimiter = getDelimiterInReverse(left,
lmetaOffsetPlusDelimiter,
- llength - metalength, HConstants.DELIMITER);
- int rmetaOffsetPlusDelimiter = roffset + metalength;
+ llength - META_LENGTH - 1, HConstants.DELIMITER);
+ int rmetaOffsetPlusDelimiter = roffset + META_LENGTH + 1;
int rightFarDelimiter = getDelimiterInReverse(right,
- rmetaOffsetPlusDelimiter, rlength - metalength,
+ rmetaOffsetPlusDelimiter, rlength - META_LENGTH - 1,
HConstants.DELIMITER);
if (leftFarDelimiter < 0 && rightFarDelimiter >= 0) {
// Nothing between .META. and regionid. Its first key.
Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java?rev=1511577&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java Thu Aug 8 04:19:49 2013
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/**
+ * Namespace POJO class. Used to represent and define namespaces.
+ *
+ * Descriptors will be persisted in an HBase table.
+ * This works since namespaces are essentially metadata of a group of tables
+ * as opposed to a more tangible container.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class NamespaceDescriptor {
+
+ /** System namespace name. */
+ public static final byte [] SYSTEM_NAMESPACE_NAME = Bytes.toBytes("hbase");
+ public static final String SYSTEM_NAMESPACE_NAME_STR =
+ Bytes.toString(SYSTEM_NAMESPACE_NAME);
+ /** Default namespace name. */
+ public static final byte [] DEFAULT_NAMESPACE_NAME = Bytes.toBytes("default");
+ public static final String DEFAULT_NAMESPACE_NAME_STR =
+ Bytes.toString(DEFAULT_NAMESPACE_NAME);
+
+ public static final NamespaceDescriptor DEFAULT_NAMESPACE = NamespaceDescriptor.create(
+ DEFAULT_NAMESPACE_NAME_STR).build();
+ public static final NamespaceDescriptor SYSTEM_NAMESPACE = NamespaceDescriptor.create(
+ SYSTEM_NAMESPACE_NAME_STR).build();
+
+ public final static Set<String> RESERVED_NAMESPACES;
+ static {
+ Set<String> set = new HashSet<String>();
+ set.add(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR);
+ set.add(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR);
+ RESERVED_NAMESPACES = Collections.unmodifiableSet(set);
+ }
+ public final static Set<byte[]> RESERVED_NAMESPACES_BYTES;
+ static {
+ Set<byte[]> set = new TreeSet<byte[]>(Bytes.BYTES_RAWCOMPARATOR);
+ for(String name: RESERVED_NAMESPACES) {
+ set.add(Bytes.toBytes(name));
+ }
+ RESERVED_NAMESPACES_BYTES = Collections.unmodifiableSet(set);
+ }
+
+ private String name;
+ private Map<String, String> configuration;
+
+ public static final Comparator<NamespaceDescriptor> NAMESPACE_DESCRIPTOR_COMPARATOR =
+ new Comparator<NamespaceDescriptor>() {
+ @Override
+ public int compare(NamespaceDescriptor namespaceDescriptor,
+ NamespaceDescriptor namespaceDescriptor2) {
+ return namespaceDescriptor.getName().compareTo(namespaceDescriptor2.getName());
+ }
+ };
+
+ private NamespaceDescriptor() {
+ }
+
+ private NamespaceDescriptor(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Getter for accessing the configuration value by key
+ */
+ public String getConfigurationValue(String key) {
+ return configuration.get(key);
+ }
+
+ /**
+ * Getter for fetching an unmodifiable {@link #configuration} map.
+ */
+ public Map<String, String> getConfiguration() {
+ // shallow pointer copy
+ return Collections.unmodifiableMap(configuration);
+ }
+
+ /**
+ * Setter for storing a configuration setting in {@link #configuration} map.
+ * @param key Config key. Same as XML config key e.g. hbase.something.or.other.
+ * @param value String value. If null, removes the setting.
+ */
+ public void setConfiguration(String key, String value) {
+ if (value == null) {
+ removeConfiguration(key);
+ } else {
+ configuration.put(key, value);
+ }
+ }
+
+ /**
+ * Remove a config setting represented by the key from the {@link #configuration} map
+ */
+ public void removeConfiguration(final String key) {
+ configuration.remove(key);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder s = new StringBuilder();
+ s.append('{');
+ s.append(HConstants.NAME);
+ s.append(" => '");
+ s.append(name);
+ s.append("'");
+ for (Map.Entry<String, String> e : configuration.entrySet()) {
+ String key = e.getKey();
+ String value = e.getValue();
+ if (key == null) {
+ continue;
+ }
+ s.append(", ");
+ s.append(key);
+ s.append(" => '");
+ s.append(value);
+ s.append("'");
+ }
+ s.append('}');
+ return s.toString();
+ }
+
+ public static Builder create(String name) {
+ return new Builder(name);
+ }
+
+ public static Builder create(NamespaceDescriptor ns) {
+ return new Builder(ns);
+ }
+
+ public static class Builder {
+ private String bName;
+ private Map<String, String> bConfiguration = new TreeMap<String, String>();
+
+ private Builder(NamespaceDescriptor ns) {
+ this.bName = ns.name;
+ this.bConfiguration = ns.configuration;
+ }
+
+ private Builder(String name) {
+ this.bName = name;
+ }
+
+ public Builder addConfiguration(Map<String, String> configuration) {
+ this.bConfiguration.putAll(configuration);
+ return this;
+ }
+
+ public Builder addConfiguration(String key, String value) {
+ this.bConfiguration.put(key, value);
+ return this;
+ }
+
+ public Builder removeConfiguration(String key) {
+ this.bConfiguration.remove(key);
+ return this;
+ }
+
+ public NamespaceDescriptor build() {
+ if (this.bName == null){
+ throw new IllegalArgumentException("A name has to be specified in a namespace.");
+ }
+
+ NamespaceDescriptor desc = new NamespaceDescriptor(this.bName);
+ desc.configuration = this.bConfiguration;
+ return desc;
+ }
+ }
+}
|