hbase-commits mailing list archives

From apurt...@apache.org
Subject svn commit: r932214 [2/4] - in /hadoop/hbase/branches/0.20_pre_durability: ./ src/contrib/stargate/ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/auth/ src/contrib/stargat...
Date Fri, 09 Apr 2010 02:10:03 GMT
Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java Fri Apr  9 02:10:02 2010
@@ -1,316 +1,381 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.util.Collection;
-import java.util.TreeSet;
-
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Parses a path based row/column/timestamp specification into its component
- * elements.
- * <p>
- *  
- */
-public class RowSpec {
-  public static final long DEFAULT_START_TIMESTAMP = 0;
-  public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE;
-
-  private byte[] row = HConstants.EMPTY_START_ROW;
-  private byte[] endRow = null;
-  private TreeSet<byte[]> columns =
-    new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
-  private long startTime = DEFAULT_START_TIMESTAMP;
-  private long endTime = DEFAULT_END_TIMESTAMP;
-  private int maxVersions = HColumnDescriptor.DEFAULT_VERSIONS;
-
-  public RowSpec(String path) throws IllegalArgumentException {
-    int i = 0;
-    while (path.charAt(i) == '/') {
-      i++;
-    }
-    i = parseRowKeys(path, i);
-    i = parseColumns(path, i);
-    i = parseTimestamp(path, i);
-  }
-
-  private int parseRowKeys(String path, int i)
-      throws IllegalArgumentException {
-    StringBuilder startRow = new StringBuilder();
-    StringBuilder endRow = null;
-    try {
-      char c;
-      boolean doEndRow = false;
-      while (i < path.length() && (c = path.charAt(i)) != '/') {
-        if (c == ',') {
-          doEndRow = true;
-          i++;
-          break;
-        }
-        startRow.append(c);
-        i++;
-      }
-      i++;
-      this.row = Bytes.toBytes(startRow.toString());
-      if (doEndRow) {
-        endRow = new StringBuilder();
-        while ((c = path.charAt(i)) != '/') {
-          endRow.append(c);
-          i++;
-        }
-        i++;
-      }
-    } catch (IndexOutOfBoundsException e) {
-      throw new IllegalArgumentException(e);
-    }
-    // HBase does not support wildcards on row keys so we will emulate a
-    // suffix glob by synthesizing appropriate start and end row keys for
-    // table scanning
-    if (startRow.charAt(startRow.length() - 1) == '*') {
-      if (endRow != null)
-        throw new IllegalArgumentException("invalid path: start row "+
-          "specified with wildcard");
-      this.row = Bytes.toBytes(startRow.substring(0, 
-                   startRow.lastIndexOf("*")));
-      this.endRow = new byte[this.row.length + 1];
-      System.arraycopy(this.row, 0, this.endRow, 0, this.row.length);
-      this.endRow[this.row.length] = (byte)255;
-    } else {
-      this.row = Bytes.toBytes(startRow.toString());
-      if (endRow != null) {
-        this.endRow = Bytes.toBytes(endRow.toString());
-      }
-    }
-    return i;
-  }
-
-  private int parseColumns(String path, int i)
-      throws IllegalArgumentException {
-    if (i >= path.length()) {
-      return i;
-    }
-    try {
-      char c;
-      StringBuilder column = new StringBuilder();
-      boolean hasColon = false;
-      while (i < path.length() && (c = path.charAt(i)) != '/') {
-        if (c == ',') {
-          if (column.length() < 1) {
-            throw new IllegalArgumentException("invalid path");
-          }
-          if (!hasColon) {
-            column.append(':');
-          }
-          this.columns.add(Bytes.toBytes(column.toString()));
-          column = new StringBuilder();
-          hasColon = false;
-          i++;
-          continue;
-        }
-        if (c == ':') {
-          hasColon = true;
-        }
-        column.append(c);
-        i++;
-      }
-      i++;
-      // trailing list entry
-      if (column.length() > 1) {
-        if (!hasColon) {
-          column.append(':');
-        }
-        this.columns.add(Bytes.toBytes(column.toString()));
-      }
-    } catch (IndexOutOfBoundsException e) {
-      throw new IllegalArgumentException(e);
-    }
-    return i;
-  }
-
-  private int parseTimestamp(String path, int i)
-      throws IllegalArgumentException {
-    if (i >= path.length()) {
-      return i;
-    }
-    long time0 = 0, time1 = 0;
-    try {
-      char c = 0;
-      StringBuilder stamp = new StringBuilder();
-      while (i < path.length()) {
-        c = path.charAt(i);
-        if (c == '/' || c == ',') {
-          break;
-        }
-        stamp.append(c);
-        i++;
-      }
-      try {
-        time0 = Long.valueOf(stamp.toString());
-      } catch (NumberFormatException e) {
-        throw new IllegalArgumentException(e);
-      }
-      if (c == ',') {
-        stamp = new StringBuilder();
-        i++;
-        while (i < path.length() && ((c = path.charAt(i)) != '/')) {
-          stamp.append(c);
-          i++;
-        }
-        try {
-          time1 = Long.valueOf(stamp.toString());
-        } catch (NumberFormatException e) {
-          throw new IllegalArgumentException(e);
-        }
-      }
-      if (c == '/') {
-        i++;
-      }
-    } catch (IndexOutOfBoundsException e) {
-      throw new IllegalArgumentException(e);
-    }
-    if (time1 != 0) {
-      startTime = time0;
-      endTime = time1;
-    } else {
-      endTime = time0;
-    }
-    return i;
-  }
-
-  public RowSpec(byte[] startRow, byte[] endRow, byte[][] columns,
-      long startTime, long endTime, int maxVersions) {
-    this.row = startRow;
-    this.endRow = endRow;
-    if (columns != null) {
-      for (byte[] col: columns) {
-        this.columns.add(col);
-      }
-    }
-    this.startTime = startTime;
-    this.endTime = endTime;
-    this.maxVersions = maxVersions;
-  }
-
-  public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> columns,
-      long startTime, long endTime, int maxVersions) {
-    this.row = startRow;
-    this.endRow = endRow;
-    if (columns != null) {
-      this.columns.addAll(columns);
-    }
-    this.startTime = startTime;
-    this.endTime = endTime;
-    this.maxVersions = maxVersions;
-  }
-
-  public boolean isSingleRow() {
-    return endRow == null;
-  }
-
-  public int getMaxVersions() {
-    return maxVersions;
-  }
-
-  public void setMaxVersions(int maxVersions) {
-    this.maxVersions = maxVersions;
-  }
-
-  public boolean hasColumns() {
-    return !columns.isEmpty();
-  }
-
-  public byte[] getRow() {
-    return row;
-  }
-
-  public byte[] getStartRow() {
-    return row;
-  }
-
-  public boolean hasEndRow() {
-    return endRow != null;
-  }
-
-  public byte[] getEndRow() {
-    return endRow;
-  }
-
-  public void addColumn(byte[] column) {
-    columns.add(column);
-  }
-
-  public byte[][] getColumns() {
-    return columns.toArray(new byte[columns.size()][]);
-  }
-
-  public boolean hasTimestamp() {
-    return (startTime == 0) && (endTime != Long.MAX_VALUE);
-  }
-
-  public long getTimestamp() {
-    return endTime;
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  public long getEndTime() {
-    return endTime;
-  }
-
-  public void setEndTime(long endTime) {
-    this.endTime = endTime;
-  }
-
-  public String toString() {
-    StringBuilder result = new StringBuilder();
-    result.append("{startRow => '");
-    if (row != null) {
-      result.append(Bytes.toString(row));
-    }
-    result.append("', endRow => '");
-    if (endRow != null)  {
-      result.append(Bytes.toString(endRow));
-    }
-    result.append("', columns => [");
-    for (byte[] col: columns) {
-      result.append(" '");
-      result.append(Bytes.toString(col));
-      result.append("'");
-    }
-    result.append(" ], startTime => ");
-    result.append(Long.toString(startTime));
-    result.append(", endTime => ");
-    result.append(Long.toString(endTime));
-    result.append(", maxVersions => ");
-    result.append(Integer.toString(maxVersions));
-    result.append("}");
-    return result.toString();
-  }
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.util.Collection;
+import java.util.TreeSet;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Parses a path based row/column/timestamp specification into its component
+ * elements.
+ * <p>
+ *  
+ */
+public class RowSpec {
+  public static final long DEFAULT_START_TIMESTAMP = 0;
+  public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE;
+  
+  private byte[] row = HConstants.EMPTY_START_ROW;
+  private byte[] endRow = null;
+  private TreeSet<byte[]> columns =
+    new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+  private long startTime = DEFAULT_START_TIMESTAMP;
+  private long endTime = DEFAULT_END_TIMESTAMP;
+  private int maxVersions = HColumnDescriptor.DEFAULT_VERSIONS;
+  private int maxValues = Integer.MAX_VALUE;
+
+  public RowSpec(String path) throws IllegalArgumentException {
+    int i = 0;
+    while (path.charAt(i) == '/') {
+      i++;
+    }
+    i = parseRowKeys(path, i);
+    i = parseColumns(path, i);
+    i = parseTimestamp(path, i);
+    i = parseQueryParams(path, i);
+  }
+
+  private int parseRowKeys(final String path, int i)
+      throws IllegalArgumentException {
+    StringBuilder startRow = new StringBuilder();
+    StringBuilder endRow = null;
+    try {
+      char c;
+      boolean doEndRow = false;
+      while (i < path.length() && (c = path.charAt(i)) != '/') {
+        if (c == ',') {
+          doEndRow = true;
+          i++;
+          break;
+        }
+        startRow.append(c);
+        i++;
+      }
+      i++;
+      this.row = Bytes.toBytes(startRow.toString());
+      if (doEndRow) {
+        endRow = new StringBuilder();
+        while ((c = path.charAt(i)) != '/') {
+          endRow.append(c);
+          i++;
+        }
+        i++;
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
+    // HBase does not support wildcards on row keys so we will emulate a
+    // suffix glob by synthesizing appropriate start and end row keys for
+    // table scanning
+    if (startRow.charAt(startRow.length() - 1) == '*') {
+      if (endRow != null)
+        throw new IllegalArgumentException("invalid path: start row "+
+          "specified with wildcard");
+      this.row = Bytes.toBytes(startRow.substring(0, 
+                   startRow.lastIndexOf("*")));
+      this.endRow = new byte[this.row.length + 1];
+      System.arraycopy(this.row, 0, this.endRow, 0, this.row.length);
+      this.endRow[this.row.length] = (byte)255;
+    } else {
+      this.row = Bytes.toBytes(startRow.toString());
+      if (endRow != null) {
+        this.endRow = Bytes.toBytes(endRow.toString());
+      }
+    }
+    return i;
+  }
+
+  private int parseColumns(final String path, int i)
+      throws IllegalArgumentException {
+    if (i >= path.length()) {
+      return i;
+    }
+    try {
+      char c;
+      StringBuilder column = new StringBuilder();
+      boolean hasColon = false;
+      while (i < path.length() && (c = path.charAt(i)) != '/') {
+        if (c == ',') {
+          if (column.length() < 1) {
+            throw new IllegalArgumentException("invalid path");
+          }
+          if (!hasColon) {
+            column.append(':');
+          }
+          this.columns.add(Bytes.toBytes(column.toString()));
+          column = new StringBuilder();
+          hasColon = false;
+          i++;
+          continue;
+        }
+        if (c == ':') {
+          hasColon = true;
+        }
+        column.append(c);
+        i++;
+      }
+      i++;
+      // trailing list entry
+      if (column.length() > 1) {
+        if (!hasColon) {
+          column.append(':');
+        }
+        this.columns.add(Bytes.toBytes(column.toString()));
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
+    return i;
+  }
+
+  private int parseTimestamp(final String path, int i)
+      throws IllegalArgumentException {
+    if (i >= path.length()) {
+      return i;
+    }
+    long time0 = 0, time1 = 0;
+    try {
+      char c = 0;
+      StringBuilder stamp = new StringBuilder();
+      while (i < path.length()) {
+        c = path.charAt(i);
+        if (c == '/' || c == ',') {
+          break;
+        }
+        stamp.append(c);
+        i++;
+      }
+      try {
+        time0 = Long.valueOf(stamp.toString());
+      } catch (NumberFormatException e) {
+        throw new IllegalArgumentException(e);
+      }
+      if (c == ',') {
+        stamp = new StringBuilder();
+        i++;
+        while (i < path.length() && ((c = path.charAt(i)) != '/')) {
+          stamp.append(c);
+          i++;
+        }
+        try {
+          time1 = Long.valueOf(stamp.toString());
+        } catch (NumberFormatException e) {
+          throw new IllegalArgumentException(e);
+        }
+      }
+      if (c == '/') {
+        i++;
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
+    if (time1 != 0) {
+      startTime = time0;
+      endTime = time1;
+    } else {
+      endTime = time0;
+    }
+    return i;
+  }
+
+  private int parseQueryParams(final String path, int i) {
+    while (i < path.length()) {
+      char c = path.charAt(i);
+      if (c != '?' && c != '&') {
+        break;
+      }
+      if (++i > path.length()) {
+        break;
+      }
+      char what = path.charAt(i);
+      if (++i > path.length()) {
+        break;
+      }
+      c = path.charAt(i);
+      if (c != '=') {
+        throw new IllegalArgumentException("malformed query parameter");
+      }
+      if (++i > path.length()) {
+        break;
+      }
+      switch (what) {
+      case 'm': {
+        StringBuilder sb = new StringBuilder();
+        while (i <= path.length()) {
+          c = path.charAt(i);
+          if (c < '0' || c > '9') {
+            i--;
+            break;
+          }
+          sb.append(c);
+        }
+        maxVersions = Integer.valueOf(sb.toString());
+      } break;
+      case 'n': {
+        StringBuilder sb = new StringBuilder();
+        while (i <= path.length()) {
+          c = path.charAt(i);
+          if (c < '0' || c > '9') {
+            i--;
+            break;
+          }
+          sb.append(c);
+        }
+        maxValues = Integer.valueOf(sb.toString());
+      } break;
+      default:
+        throw new IllegalArgumentException("unknown parameter '" + c + "'");
+      }
+    }
+    return i;
+  }
+
+  public RowSpec(byte[] startRow, byte[] endRow, byte[][] columns,
+      long startTime, long endTime, int maxVersions) {
+    this.row = startRow;
+    this.endRow = endRow;
+    if (columns != null) {
+      for (byte[] col: columns) {
+        this.columns.add(col);
+      }
+    }
+    this.startTime = startTime;
+    this.endTime = endTime;
+    this.maxVersions = maxVersions;
+  }
+
+  public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> columns,
+      long startTime, long endTime, int maxVersions) {
+    this.row = startRow;
+    this.endRow = endRow;
+    if (columns != null) {
+      this.columns.addAll(columns);
+    }
+    this.startTime = startTime;
+    this.endTime = endTime;
+    this.maxVersions = maxVersions;
+  }
+
+  public boolean isSingleRow() {
+    return endRow == null;
+  }
+
+  public int getMaxVersions() {
+    return maxVersions;
+  }
+
+  public void setMaxVersions(final int maxVersions) {
+    this.maxVersions = maxVersions;
+  }
+
+  public int getMaxValues() {
+    return maxValues;
+  }
+
+  public void setMaxValues(final int maxValues) {
+    this.maxValues = maxValues;
+  }
+
+  public boolean hasColumns() {
+    return !columns.isEmpty();
+  }
+
+  public byte[] getRow() {
+    return row;
+  }
+
+  public byte[] getStartRow() {
+    return row;
+  }
+
+  public boolean hasEndRow() {
+    return endRow != null;
+  }
+
+  public byte[] getEndRow() {
+    return endRow;
+  }
+
+  public void addColumn(final byte[] column) {
+    columns.add(column);
+  }
+
+  public byte[][] getColumns() {
+    return columns.toArray(new byte[columns.size()][]);
+  }
+
+  public boolean hasTimestamp() {
+    return (startTime == 0) && (endTime != Long.MAX_VALUE);
+  }
+
+  public long getTimestamp() {
+    return endTime;
+  }
+
+  public long getStartTime() {
+    return startTime;
+  }
+
+  public void setStartTime(final long startTime) {
+    this.startTime = startTime;
+  }
+
+  public long getEndTime() {
+    return endTime;
+  }
+
+  public void setEndTime(long endTime) {
+    this.endTime = endTime;
+  }
+
+  public String toString() {
+    StringBuilder result = new StringBuilder();
+    result.append("{startRow => '");
+    if (row != null) {
+      result.append(Bytes.toString(row));
+    }
+    result.append("', endRow => '");
+    if (endRow != null)  {
+      result.append(Bytes.toString(endRow));
+    }
+    result.append("', columns => [");
+    for (byte[] col: columns) {
+      result.append(" '");
+      result.append(Bytes.toString(col));
+      result.append("'");
+    }
+    result.append(" ], startTime => ");
+    result.append(Long.toString(startTime));
+    result.append(", endTime => ");
+    result.append(Long.toString(endTime));
+    result.append(", maxVersions => ");
+    result.append(Integer.toString(maxVersions));
+    result.append(", maxValues => ");
+    result.append(Integer.toString(maxValues));
+    result.append("}");
+    return result.toString();
+  }
+
+}
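
A minimal usage sketch (not from this commit) of the path grammar that RowSpec parses, assuming the stargate classes above are on the classpath. The table-independent path below uses a made-up row prefix and column, and deliberately omits the new ?m=/?n= query parameters introduced in this revision; getter names and behavior are taken from the class as shown.

    import org.apache.hadoop.hbase.stargate.RowSpec;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowSpecExample {
      public static void main(String[] args) {
        // suffix glob on the row key, one column, and a 1..100 time range
        RowSpec spec = new RowSpec("/myrow*/info:name/1,100");
        // wildcard is stripped from the start row...
        System.out.println(Bytes.toString(spec.getStartRow()));   // myrow
        // ...and an end row ("myrow" + 0xff) is synthesized for scanning
        System.out.println(spec.hasEndRow());                      // true
        System.out.println(spec.getStartTime() + ".." + spec.getEndTime()); // 1..100
        System.out.println(spec);                                  // full toString() dump
      }
    }
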

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java Fri Apr  9 02:10:02 2010
@@ -1,149 +1,172 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.io.IOException;
-
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.stargate.model.CellModel;
-import org.apache.hadoop.hbase.stargate.model.CellSetModel;
-import org.apache.hadoop.hbase.stargate.model.RowModel;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import com.sun.jersey.core.util.Base64;
-
-public class ScannerInstanceResource implements Constants {
-  private static final Log LOG =
-    LogFactory.getLog(ScannerInstanceResource.class);
-
-  ResultGenerator generator;
-  String id;
-  int batch;
-  RESTServlet servlet;
-  CacheControl cacheControl;
-
-  public ScannerInstanceResource(String table, String id, 
-      ResultGenerator generator, int batch) throws IOException {
-    this.id = id;
-    this.generator = generator;
-    this.batch = batch;
-    servlet = RESTServlet.getInstance();
-    cacheControl = new CacheControl();
-    cacheControl.setNoCache(true);
-    cacheControl.setNoTransform(false);
-  }
-
-  @GET
-  @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response get(@Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("GET " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    CellSetModel model = new CellSetModel();
-    RowModel rowModel = null;
-    byte[] rowKey = null;
-    int count = batch;
-    do {
-      KeyValue value = null;
-      try {
-        value = generator.next();
-      } catch (IllegalStateException e) {
-        ScannerResource.delete(id);
-        throw new WebApplicationException(Response.Status.GONE);
-      }
-      if (value == null) {
-        LOG.info("generator exhausted");
-        // respond with 204 (No Content) if an empty cell set would be
-        // returned
-        if (count == batch) {
-          return Response.noContent().build();
-        }
-        break;
-      }
-      if (rowKey == null) {
-        rowKey = value.getRow();
-        rowModel = new RowModel(rowKey);
-      }
-      if (!Bytes.equals(value.getRow(), rowKey)) {
-        model.addRow(rowModel);
-        rowKey = value.getRow();
-        rowModel = new RowModel(rowKey);
-      }
-      rowModel.addCell(
-        new CellModel(value.getColumn(), value.getTimestamp(),
-              value.getValue()));
-    } while (--count > 0);
-    model.addRow(rowModel);
-    ResponseBuilder response = Response.ok(model);
-    response.cacheControl(cacheControl);
-    return response.build();
-  }
-
-  @GET
-  @Produces(MIMETYPE_BINARY)
-  public Response getBinary(@Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
-        MIMETYPE_BINARY);
-    }
-    servlet.getMetrics().incrementRequests(1);
-    try {
-      KeyValue value = generator.next();
-      if (value == null) {
-        LOG.info("generator exhausted");
-        return Response.noContent().build();
-      }
-      ResponseBuilder response = Response.ok(value.getValue());
-      response.cacheControl(cacheControl);
-      response.header("X-Row", Base64.encode(value.getRow()));
-      response.header("X-Column", Base64.encode(value.getColumn()));
-      response.header("X-Timestamp", value.getTimestamp());
-      return response.build();
-    } catch (IllegalStateException e) {
-      ScannerResource.delete(id);
-      throw new WebApplicationException(Response.Status.GONE);
-    }
-  }
-
-  @DELETE
-  public Response delete(@Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    ScannerResource.delete(id);
-    return Response.ok().build();
-  }
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.stargate.model.CellModel;
+import org.apache.hadoop.hbase.stargate.model.CellSetModel;
+import org.apache.hadoop.hbase.stargate.model.RowModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.sun.jersey.core.util.Base64;
+
+public class ScannerInstanceResource implements Constants {
+  private static final Log LOG =
+    LogFactory.getLog(ScannerInstanceResource.class);
+
+  User user;
+  ResultGenerator generator;
+  String id;
+  int batch = 1;
+  RESTServlet servlet;
+  CacheControl cacheControl;
+
+  public ScannerInstanceResource(User user, String table, String id, 
+      ResultGenerator generator, int batch) throws IOException {
+    this.user = user;
+    this.id = id;
+    this.generator = generator;
+    this.batch = batch;
+    servlet = RESTServlet.getInstance();
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  @GET
+  @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response get(final @Context UriInfo uriInfo, 
+      @QueryParam("n") int maxRows, final @QueryParam("c") int maxValues)
+      throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    servlet.getMetrics().incrementRequests(1);
+    CellSetModel model = new CellSetModel();
+    RowModel rowModel = null;
+    byte[] rowKey = null;
+    int limit = batch;
+    if (maxValues > 0) {
+      limit = maxValues;
+    }
+    int count = limit;
+    do {
+      KeyValue value = null;
+      try {
+        value = generator.next();
+      } catch (IllegalStateException e) {
+        ScannerResource.delete(id);
+        throw new WebApplicationException(Response.Status.GONE);
+      }
+      if (value == null) {
+        LOG.info("generator exhausted");
+        // respond with 204 (No Content) if an empty cell set would be
+        // returned
+        if (count == limit) {
+          return Response.noContent().build();
+        }
+        break;
+      }
+      if (rowKey == null) {
+        rowKey = value.getRow();
+        rowModel = new RowModel(rowKey);
+      }
+      if (!Bytes.equals(value.getRow(), rowKey)) {
+        // the user request limit is a transaction limit, so we need to
+        // account for scanner.next()
+        if (user != null && !servlet.userRequestLimit(user, 1)) {
+          generator.putBack(value);
+          break;
+        }
+        // if maxRows was given as a query param, stop if we would exceed the
+        // specified number of rows
+        if (maxRows > 0) { 
+          if (--maxRows == 0) {
+            generator.putBack(value);
+            break;
+          }
+        }
+        model.addRow(rowModel);
+        rowKey = value.getRow();
+        rowModel = new RowModel(rowKey);
+      }
+      rowModel.addCell(
+        new CellModel(value.getColumn(), value.getTimestamp(),
+              value.getValue()));
+    } while (--count > 0);
+    model.addRow(rowModel);
+    ResponseBuilder response = Response.ok(model);
+    response.cacheControl(cacheControl);
+    return response.build();
+  }
+
+  @GET
+  @Produces(MIMETYPE_BINARY)
+  public Response getBinary(final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+        MIMETYPE_BINARY);
+    }
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      KeyValue value = generator.next();
+      if (value == null) {
+        LOG.info("generator exhausted");
+        return Response.noContent().build();
+      }
+      ResponseBuilder response = Response.ok(value.getValue());
+      response.cacheControl(cacheControl);
+      response.header("X-Row", Base64.encode(value.getRow()));
+      response.header("X-Column", Base64.encode(value.getColumn()));
+      response.header("X-Timestamp", value.getTimestamp());
+      return response.build();
+    } catch (IllegalStateException e) {
+      ScannerResource.delete(id);
+      throw new WebApplicationException(Response.Status.GONE);
+    }
+  }
+
+  @DELETE
+  public Response delete(final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+    }
+    servlet.getMetrics().incrementRequests(1);
+    ScannerResource.delete(id);
+    return Response.ok().build();
+  }
+}
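
A hedged client-side sketch (not from this commit) of exercising the new "n" (max rows) and "c" (max values) query parameters accepted by ScannerInstanceResource.get() above. The host, port, table, scanner id, and Accept media type are placeholders, not values taken from this diff; only the query parameter names and the 204 No Content behavior come from the resource as shown.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ScannerGetExample {
      public static void main(String[] args) throws Exception {
        // placeholder endpoint and scanner id; "n" caps rows, "c" caps values returned
        URL url = new URL("http://localhost:8080/mytable/scanner/12345abcd?n=5&c=50");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "text/xml");
        int status = conn.getResponseCode();
        if (status == 204) {
          // the resource responds 204 No Content once the scanner is exhausted
          System.out.println("scanner exhausted");
          return;
        }
        BufferedReader in =
          new BufferedReader(new InputStreamReader(conn.getInputStream()));
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line);
        }
        in.close();
      }
    }
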

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java Fri Apr  9 02:10:02 2010
@@ -1,139 +1,141 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.filter.Filter;
-
-import org.apache.hadoop.hbase.stargate.auth.User;
-import org.apache.hadoop.hbase.stargate.model.ScannerModel;
-
-public class ScannerResource implements Constants {
-
-  private static final Log LOG = LogFactory.getLog(ScannerResource.class);
-
-  static final Map<String,ScannerInstanceResource> scanners = 
-    new HashMap<String,ScannerInstanceResource>();
-
-  User user;
-  String tableName;
-  String actualTableName;
-  RESTServlet servlet;
-
-  public ScannerResource(User user, String table) throws IOException {
-    if (user != null) {
-      this.user = user;
-      this.actualTableName = 
-        !user.isAdmin() ? user.getName() + "." + table : table;
-    } else {
-      this.actualTableName = table;
-    }
-    this.tableName = table;
-    servlet = RESTServlet.getInstance();
-  }
-
-  static void delete(String id) {
-    synchronized (scanners) {
-      ScannerInstanceResource instance = scanners.remove(id);
-      if (instance != null) {
-        instance.generator.close();
-      }
-    }
-  }
-
-  Response update(ScannerModel model, boolean replace, UriInfo uriInfo) {
-    servlet.getMetrics().incrementRequests(1);
-    byte[] endRow = model.hasEndRow() ? model.getEndRow() : null;
-    RowSpec spec = new RowSpec(model.getStartRow(), endRow,
-      model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
-    try {
-      Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
-      ScannerResultGenerator gen = 
-        new ScannerResultGenerator(actualTableName, spec, filter);
-      String id = gen.getID();
-      ScannerInstanceResource instance = 
-        new ScannerInstanceResource(actualTableName, id, gen, model.getBatch());
-      synchronized (scanners) {
-        scanners.put(id, instance);
-      }
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("new scanner: " + id);
-      }
-      UriBuilder builder = uriInfo.getAbsolutePathBuilder();
-      URI uri = builder.path(id).build();
-      return Response.created(uri).build();
-    } catch (IOException e) {
-      throw new WebApplicationException(e,
-              Response.Status.SERVICE_UNAVAILABLE);
-    } catch (Exception e) {
-      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
-    }
-  }
-
-  @PUT
-  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response put(ScannerModel model, @Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("PUT " + uriInfo.getAbsolutePath());
-    }
-    return update(model, true, uriInfo);
-  }
-
-  @POST
-  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response post(ScannerModel model, @Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("POST " + uriInfo.getAbsolutePath());
-    }
-    return update(model, false, uriInfo);
-  }
-
-  @Path("{scanner: .+}")
-  public ScannerInstanceResource getScannerInstanceResource(
-      @PathParam("scanner") String id) {
-    synchronized (scanners) {
-      ScannerInstanceResource instance = scanners.get(id);
-      if (instance == null) {
-        throw new WebApplicationException(Response.Status.NOT_FOUND);
-      }
-      return instance;
-    }
-  }
-
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.filter.Filter;
+
+import org.apache.hadoop.hbase.stargate.User;
+import org.apache.hadoop.hbase.stargate.model.ScannerModel;
+
+public class ScannerResource implements Constants {
+
+  private static final Log LOG = LogFactory.getLog(ScannerResource.class);
+
+  static final Map<String,ScannerInstanceResource> scanners =
+   Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
+
+  User user;
+  String tableName;
+  String actualTableName;
+  RESTServlet servlet;
+
+  public ScannerResource(User user, String table) throws IOException {
+    if (user != null) {
+      this.user = user;
+      this.actualTableName = 
+        !user.isAdmin() ? user.getName() + "." + table : table;
+    } else {
+      this.actualTableName = table;
+    }
+    this.tableName = table;
+    servlet = RESTServlet.getInstance();
+  }
+
+  static void delete(final String id) {
+    ScannerInstanceResource instance = scanners.remove(id);
+    if (instance != null) {
+      instance.generator.close();
+    }
+  }
+
+  Response update(final ScannerModel model, final boolean replace, 
+      final UriInfo uriInfo) throws IOException {
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
+    servlet.getMetrics().incrementRequests(1);
+    byte[] endRow = model.hasEndRow() ? model.getEndRow() : null;
+    RowSpec spec = new RowSpec(model.getStartRow(), endRow,
+      model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
+    try {
+      Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
+      ScannerResultGenerator gen = 
+        new ScannerResultGenerator(actualTableName, spec, filter);
+      String id = gen.getID();
+      ScannerInstanceResource instance = 
+        new ScannerInstanceResource(user, actualTableName, id, gen, 
+          model.getBatch());
+      scanners.put(id, instance);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("new scanner: " + id);
+      }
+      UriBuilder builder = uriInfo.getAbsolutePathBuilder();
+      URI uri = builder.path(id).build();
+      return Response.created(uri).build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+              Response.Status.SERVICE_UNAVAILABLE);
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
+    }
+  }
+
+  @PUT
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response put(final ScannerModel model, 
+      final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    return update(model, true, uriInfo);
+  }
+
+  @POST
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response post(final ScannerModel model,
+      final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath());
+    }
+    return update(model, false, uriInfo);
+  }
+
+  @Path("{scanner: .+}")
+  public ScannerInstanceResource getScannerInstanceResource(
+      final @PathParam("scanner") String id) {
+    ScannerInstanceResource instance = scanners.get(id);
+    if (instance == null) {
+      throw new WebApplicationException(Response.Status.NOT_FOUND);
+    }
+    return instance;
+  }
+
+}

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java Fri Apr  9 02:10:02 2010
@@ -1,166 +1,179 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.UnknownScannerException;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTablePool;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.stargate.model.ScannerModel;
-import org.apache.hadoop.util.StringUtils;
-
-public class ScannerResultGenerator extends ResultGenerator {
-
-  private static final Log LOG =
-    LogFactory.getLog(ScannerResultGenerator.class);
-
-  public static Filter buildFilterFromModel(ScannerModel model) 
-      throws Exception {
-    String filter = model.getFilter();
-    if (filter == null || filter.length() == 0) {
-      return null;
-    }
-    return buildFilter(filter);
-  }
-
-  private String id;
-  private Iterator<KeyValue> rowI;
-  private ResultScanner scanner;
-  private Result cached;
-
-  public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
-      final Filter filter) throws IllegalArgumentException, IOException {
-    HTablePool pool = RESTServlet.getInstance().getTablePool(); 
-    HTable table = pool.getTable(tableName);
-    try {
-      Scan scan;
-      if (rowspec.hasEndRow()) {
-        scan = new Scan(rowspec.getStartRow(), rowspec.getEndRow());
-      } else {
-        scan = new Scan(rowspec.getStartRow());
-      }
-      if (rowspec.hasColumns()) {
-        byte[][] columns = rowspec.getColumns();
-        for (byte[] column: columns) {
-          byte[][] split = KeyValue.parseColumn(column);
-          if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
-            scan.addColumn(split[0], split[1]);
-          } else {
-            scan.addFamily(split[0]);
-          }
-        }
-      } else {
-        for (HColumnDescriptor family: 
-            table.getTableDescriptor().getFamilies()) {
-          scan.addFamily(family.getName());
-        }
-      }
-      scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());          
-      scan.setMaxVersions(rowspec.getMaxVersions());
-      if (filter != null) {
-        scan.setFilter(filter);
-      }
-      // always disable block caching on the cluster
-      scan.setCacheBlocks(false);
-      scanner = table.getScanner(scan);
-      cached = null;
-      id = Long.toString(System.currentTimeMillis()) +
-             Integer.toHexString(scanner.hashCode());
-    } finally {
-      pool.putTable(table);
-    }
-  }
-
-  public String getID() {
-    return id;
-  }
-
-  public void close() {
-  }
-
-  public boolean hasNext() {
-    if (rowI != null && rowI.hasNext()) {
-      return true;
-    }
-    if (cached != null) {
-      return true;
-    }
-    try {
-      Result result = scanner.next();
-      if (result != null && !result.isEmpty()) {
-        cached = result;
-      }
-    } catch (UnknownScannerException e) {
-      throw new IllegalArgumentException(e);
-    } catch (IOException e) {
-      LOG.error(StringUtils.stringifyException(e));
-    }
-    return cached != null;
-  }
-
-  public KeyValue next() {
-    boolean loop;
-    do {
-      loop = false;
-      if (rowI != null) {
-        if (rowI.hasNext()) {
-          return rowI.next();
-        } else {
-          rowI = null;
-        }
-      }
-      if (cached != null) {
-        rowI = cached.list().iterator();
-        loop = true;
-        cached = null;
-      } else {
-        Result result = null;
-        try {
-          result = scanner.next();
-        } catch (UnknownScannerException e) {
-          throw new IllegalArgumentException(e);
-        } catch (IOException e) {
-          LOG.error(StringUtils.stringifyException(e));
-        }
-        if (result != null && !result.isEmpty()) {
-          rowI = result.list().iterator();
-          loop = true;
-        }
-      }
-    } while (loop);
-    return null;
-  }
-
-  public void remove() {
-    throw new UnsupportedOperationException("remove not supported");
-  }
-
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.UnknownScannerException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.stargate.model.ScannerModel;
+import org.apache.hadoop.util.StringUtils;
+
+public class ScannerResultGenerator extends ResultGenerator {
+
+  private static final Log LOG =
+    LogFactory.getLog(ScannerResultGenerator.class);
+
+  public static Filter buildFilterFromModel(final ScannerModel model) 
+      throws Exception {
+    String filter = model.getFilter();
+    if (filter == null || filter.length() == 0) {
+      return null;
+    }
+    return buildFilter(filter);
+  }
+
+  private String id;
+  private Iterator<KeyValue> rowI;
+  private KeyValue cache;
+  private ResultScanner scanner;
+  private Result cached;
+
+  public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
+      final Filter filter) throws IllegalArgumentException, IOException {
+    HTablePool pool = RESTServlet.getInstance().getTablePool(); 
+    HTable table = pool.getTable(tableName);
+    try {
+      Scan scan;
+      if (rowspec.hasEndRow()) {
+        scan = new Scan(rowspec.getStartRow(), rowspec.getEndRow());
+      } else {
+        scan = new Scan(rowspec.getStartRow());
+      }
+      if (rowspec.hasColumns()) {
+        byte[][] columns = rowspec.getColumns();
+        for (byte[] column: columns) {
+          byte[][] split = KeyValue.parseColumn(column);
+          if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
+            scan.addColumn(split[0], split[1]);
+          } else {
+            scan.addFamily(split[0]);
+          }
+        }
+      } else {
+        for (HColumnDescriptor family: 
+            table.getTableDescriptor().getFamilies()) {
+          scan.addFamily(family.getName());
+        }
+      }
+      scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());          
+      scan.setMaxVersions(rowspec.getMaxVersions());
+      if (filter != null) {
+        scan.setFilter(filter);
+      }
+      // always disable block caching on the cluster when scanning
+      scan.setCacheBlocks(false);
+      scanner = table.getScanner(scan);
+      cached = null;
+      id = Long.toString(System.currentTimeMillis()) +
+             Integer.toHexString(scanner.hashCode());
+    } finally {
+      pool.putTable(table);
+    }
+  }
+
+  public String getID() {
+    return id;
+  }
+
+  public void close() {
+  }
+
+  public boolean hasNext() {
+    if (cache != null) {
+      return true;
+    }
+    if (rowI != null && rowI.hasNext()) {
+      return true;
+    }
+    if (cached != null) {
+      return true;
+    }
+    try {
+      Result result = scanner.next();
+      if (result != null && !result.isEmpty()) {
+        cached = result;
+      }
+    } catch (UnknownScannerException e) {
+      throw new IllegalArgumentException(e);
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+    return cached != null;
+  }
+
+  public KeyValue next() {
+    if (cache != null) {
+      KeyValue kv = cache;
+      cache = null;
+      return kv;
+    }
+    boolean loop;
+    do {
+      loop = false;
+      if (rowI != null) {
+        if (rowI.hasNext()) {
+          return rowI.next();
+        } else {
+          rowI = null;
+        }
+      }
+      if (cached != null) {
+        rowI = cached.list().iterator();
+        loop = true;
+        cached = null;
+      } else {
+        Result result = null;
+        try {
+          result = scanner.next();
+        } catch (UnknownScannerException e) {
+          throw new IllegalArgumentException(e);
+        } catch (IOException e) {
+          LOG.error(StringUtils.stringifyException(e));
+        }
+        if (result != null && !result.isEmpty()) {
+          rowI = result.list().iterator();
+          loop = true;
+        }
+      }
+    } while (loop);
+    return null;
+  }
+
+  public void putBack(KeyValue kv) {
+    this.cache = kv;
+  }
+
+  public void remove() {
+    throw new UnsupportedOperationException("remove not supported");
+  }
+
+}

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/SchemaResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/SchemaResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/SchemaResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/SchemaResource.java Fri Apr  9 02:10:02 2010
@@ -1,255 +1,252 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.io.IOException;
-import java.util.Map;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.xml.namespace.QName;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableExistsException;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTablePool;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.stargate.auth.User;
-import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
-import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
-import org.apache.hadoop.hbase.util.Bytes;
-
-public class SchemaResource implements Constants {
-  private static final Log LOG = LogFactory.getLog(SchemaResource.class);
-
-  User user;
-  String tableName;
-  String actualTableName;
-  CacheControl cacheControl;
-  RESTServlet servlet;
-
-  public SchemaResource(User user, String table) throws IOException {
-    if (user != null) {
-      this.user = user;
-      this.actualTableName = 
-        !user.isAdmin() ? (user.getName() + "." + table) : table;
-    } else {
-      this.actualTableName = table;
-    }
-    this.tableName = table;
-    servlet = RESTServlet.getInstance();
-    cacheControl = new CacheControl();
-    cacheControl.setNoCache(true);
-    cacheControl.setNoTransform(false);
-  }
-
-  private HTableDescriptor getTableSchema() throws IOException,
-      TableNotFoundException {
-    HTablePool pool = servlet.getTablePool();
-    HTable table = pool.getTable(actualTableName);
-    try {
-      return table.getTableDescriptor();
-    } finally {
-      pool.putTable(table);
-    }
-  }
-
-  @GET
-  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response get(@Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("GET " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    try {
-      HTableDescriptor htd = getTableSchema();
-      TableSchemaModel model = new TableSchemaModel();
-      model.setName(tableName);
-      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
-          htd.getValues().entrySet()) {
-        model.addAttribute(Bytes.toString(e.getKey().get()), 
-            Bytes.toString(e.getValue().get()));
-      }
-      for (HColumnDescriptor hcd: htd.getFamilies()) {
-        ColumnSchemaModel columnModel = new ColumnSchemaModel();
-        columnModel.setName(hcd.getNameAsString());
-        for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
-          hcd.getValues().entrySet()) {
-        columnModel.addAttribute(Bytes.toString(e.getKey().get()), 
-          Bytes.toString(e.getValue().get()));
-      }
-        model.addColumnFamily(columnModel);
-      }
-      ResponseBuilder response = Response.ok(model);
-      response.cacheControl(cacheControl);
-      return response.build();
-    } catch (TableNotFoundException e) {
-      throw new WebApplicationException(Response.Status.NOT_FOUND);
-    } catch (IOException e) {
-      throw new WebApplicationException(e,
-                  Response.Status.SERVICE_UNAVAILABLE);
-    }
-  }
-
-  private Response replace(byte[] tableName, TableSchemaModel model,
-      UriInfo uriInfo, HBaseAdmin admin) {
-    try {
-      HTableDescriptor htd = new HTableDescriptor(tableName);
-      for (Map.Entry<QName,Object> e: model.getAny().entrySet()) {
-        htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
-      }
-      for (ColumnSchemaModel family: model.getColumns()) {
-        HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
-        for (Map.Entry<QName,Object> e: family.getAny().entrySet()) {
-          hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
-        }
-        htd.addFamily(hcd);
-      }
-      if (admin.tableExists(tableName)) {
-        admin.disableTable(tableName);
-        admin.modifyTable(tableName, htd);
-        admin.enableTable(tableName);
-      } else try {
-        admin.createTable(htd);
-      } catch (TableExistsException e) {
-        // race, someone else created a table with the same name
-        throw new WebApplicationException(e, Response.Status.NOT_MODIFIED);
-      }
-      return Response.created(uriInfo.getAbsolutePath()).build();
-    } catch (IOException e) {
-      throw new WebApplicationException(e, 
-            Response.Status.SERVICE_UNAVAILABLE);
-    }      
-  } 
-
-  private Response update(byte[] tableName, TableSchemaModel model,
-      UriInfo uriInfo, HBaseAdmin admin) {
-    try {
-      HTableDescriptor htd = admin.getTableDescriptor(tableName);
-      admin.disableTable(tableName);
-      try {
-        for (ColumnSchemaModel family: model.getColumns()) {
-          HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
-          for (Map.Entry<QName,Object> e: family.getAny().entrySet()) {
-            hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
-          }
-          if (htd.hasFamily(hcd.getName())) {
-            admin.modifyColumn(tableName, hcd.getName(), hcd);
-          } else {
-            admin.addColumn(model.getName(), hcd);            
-          }
-        }
-      } catch (IOException e) {
-        throw new WebApplicationException(e, 
-            Response.Status.INTERNAL_SERVER_ERROR);
-      } finally {
-        admin.enableTable(tableName);
-      }
-      return Response.ok().build();
-    } catch (IOException e) {
-      throw new WebApplicationException(e,
-          Response.Status.SERVICE_UNAVAILABLE);
-    }
-  }
-
-  private Response update(TableSchemaModel model, boolean replace,
-      UriInfo uriInfo) {
-    try {
-      servlet.invalidateMaxAge(tableName);
-      byte[] tableName = Bytes.toBytes(actualTableName);
-      HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
-      if (replace || !admin.tableExists(tableName)) {
-        return replace(tableName, model, uriInfo, admin);
-      } else {
-        return update(tableName, model, uriInfo, admin);
-      }
-    } catch (IOException e) {
-      throw new WebApplicationException(e, 
-            Response.Status.SERVICE_UNAVAILABLE);
-    }
-  }
-
-  @PUT
-  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response put(TableSchemaModel model, @Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("PUT " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    // use the name given in the path, but warn if the name on the path and
-    // the name in the schema are different
-    if (model.getName() != tableName) {
-      LOG.warn("table name mismatch: path='" + tableName + "', schema='" +
-        model.getName() + "'");
-    }
-    return update(model, true, uriInfo);
-  }
-
-  @POST
-  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response post(TableSchemaModel model, @Context UriInfo uriInfo) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("PUT " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    // use the name given in the path, but warn if the name on the path and
-    // the name in the schema are different
-    if (model.getName() != tableName) {
-      LOG.warn("table name mismatch: path='" + tableName + "', schema='" +
-        model.getName() + "'");
-    }
-    return update(model, false, uriInfo);
-  }
-
-  @DELETE
-  public Response delete(@Context UriInfo uriInfo) {     
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
-    }
-    servlet.getMetrics().incrementRequests(1);
-    try {
-      HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
-      admin.disableTable(actualTableName);
-      admin.deleteTable(actualTableName);
-      return Response.ok().build();
-    } catch (TableNotFoundException e) {
-      throw new WebApplicationException(Response.Status.NOT_FOUND);
-    } catch (IOException e) {
-      throw new WebApplicationException(e, 
-            Response.Status.SERVICE_UNAVAILABLE);
-    }
-  }
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Map;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import javax.xml.namespace.QName;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.stargate.User;
+import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
+import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class SchemaResource implements Constants {
+  private static final Log LOG = LogFactory.getLog(SchemaResource.class);
+
+  User user;
+  String tableName;
+  String actualTableName;
+  CacheControl cacheControl;
+  RESTServlet servlet;
+
+  public SchemaResource(User user, String table) throws IOException {
+    if (user != null) {
+      this.user = user;
+      this.actualTableName = 
+        !user.isAdmin() ? (user.getName() + "." + table) : table;
+    } else {
+      this.actualTableName = table;
+    }
+    this.tableName = table;
+    servlet = RESTServlet.getInstance();
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  private HTableDescriptor getTableSchema() throws IOException,
+      TableNotFoundException {
+    HTablePool pool = servlet.getTablePool();
+    HTable table = pool.getTable(actualTableName);
+    try {
+      return table.getTableDescriptor();
+    } finally {
+      pool.putTable(table);
+    }
+  }
+
+  @GET
+  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response get(final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      ResponseBuilder response =
+        Response.ok(new TableSchemaModel(getTableSchema()));
+      response.cacheControl(cacheControl);
+      return response.build();
+    } catch (TableNotFoundException e) {
+      throw new WebApplicationException(Response.Status.NOT_FOUND);
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  private Response replace(final byte[] tableName, 
+      final TableSchemaModel model, final UriInfo uriInfo,
+      final HBaseAdmin admin) {
+    try {
+      HTableDescriptor htd = new HTableDescriptor(tableName);
+      for (Map.Entry<QName,Object> e: model.getAny().entrySet()) {
+        htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
+      }
+      for (ColumnSchemaModel family: model.getColumns()) {
+        HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
+        for (Map.Entry<QName,Object> e: family.getAny().entrySet()) {
+          hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
+        }
+        htd.addFamily(hcd);
+      }
+      if (admin.tableExists(tableName)) {
+        admin.disableTable(tableName);
+        admin.modifyTable(tableName, htd);
+        admin.enableTable(tableName);
+      } else try {
+        admin.createTable(htd);
+      } catch (TableExistsException e) {
+        // race, someone else created a table with the same name
+        throw new WebApplicationException(e, Response.Status.NOT_MODIFIED);
+      }
+      return Response.created(uriInfo.getAbsolutePath()).build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+            Response.Status.SERVICE_UNAVAILABLE);
+    }      
+  } 
+
+  private Response update(final byte[] tableName,final TableSchemaModel model,
+      final UriInfo uriInfo, final HBaseAdmin admin) {
+    try {
+      HTableDescriptor htd = admin.getTableDescriptor(tableName);
+      admin.disableTable(tableName);
+      try {
+        for (ColumnSchemaModel family: model.getColumns()) {
+          HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
+          for (Map.Entry<QName,Object> e: family.getAny().entrySet()) {
+            hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
+          }
+          if (htd.hasFamily(hcd.getName())) {
+            admin.modifyColumn(tableName, hcd.getName(), hcd);
+          } else {
+            admin.addColumn(tableName, hcd);
+          }
+        }
+      } catch (IOException e) {
+        throw new WebApplicationException(e, 
+            Response.Status.INTERNAL_SERVER_ERROR);
+      } finally {
+        admin.enableTable(tableName);
+      }
+      return Response.ok().build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+          Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  private Response update(final TableSchemaModel model, final boolean replace,
+      final UriInfo uriInfo) {
+    try {
+      servlet.invalidateMaxAge(tableName);
+      byte[] tableName = Bytes.toBytes(actualTableName);
+      HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
+      if (replace || !admin.tableExists(tableName)) {
+        return replace(tableName, model, uriInfo, admin);
+      } else {
+        return update(tableName, model, uriInfo, admin);
+      }
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+            Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  @PUT
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response put(final TableSchemaModel model, 
+      final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
+    servlet.getMetrics().incrementRequests(1);
+    return update(model, true, uriInfo);
+  }
+
+  @POST
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+  public Response post(final TableSchemaModel model, 
+      final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
+    servlet.getMetrics().incrementRequests(1);
+    return update(model, false, uriInfo);
+  }
+
+  @DELETE
+  public Response delete(final @Context UriInfo uriInfo) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+    }
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
+      boolean success = false;
+      for (int i = 0; i < 10; i++) try {
+        admin.disableTable(actualTableName);
+        success = true;
+        break;
+      } catch (IOException e) {
+      }
+      if (!success) {
+        throw new IOException("could not disable table");
+      }
+      admin.deleteTable(actualTableName);
+      return Response.ok().build();
+    } catch (TableNotFoundException e) {
+      throw new WebApplicationException(Response.Status.NOT_FOUND);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+            Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+}
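
For illustration only (not part of the committed patch), a minimal standalone
client exercising the schema endpoints above might look like the following.
The host, port, table name, schema XML shape, and the text/xml content type
are assumptions; per the resource code itself, a successful replace answers
201 Created and an exceeded per-user request limit answers 509.

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class SchemaClientSketch {
  public static void main(String[] args) throws IOException {
    // Assumed Stargate endpoint: /<table>/schema on a local instance.
    URL url = new URL("http://localhost:8080/mytable/schema");
    // Assumed XML form of TableSchemaModel with a single column family.
    String schemaXml = "<TableSchema name=\"mytable\">"
        + "<ColumnSchema name=\"cf\"/></TableSchema>";

    HttpURLConnection put = (HttpURLConnection) url.openConnection();
    put.setRequestMethod("PUT");   // PUT replaces the schema; POST updates it incrementally
    put.setDoOutput(true);
    put.setRequestProperty("Content-Type", "text/xml");
    OutputStream out = put.getOutputStream();
    out.write(schemaXml.getBytes("UTF-8"));
    out.close();
    System.out.println("PUT /mytable/schema -> " + put.getResponseCode());
    put.disconnect();
  }
}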

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterStatusResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterStatusResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterStatusResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterStatusResource.java Fri Apr  9 02:10:02 2010
@@ -44,22 +44,27 @@ public class StorageClusterStatusResourc
   private static final Log LOG =
     LogFactory.getLog(StorageClusterStatusResource.class);
 
+  private User user;
   private CacheControl cacheControl;
   private RESTServlet servlet;
 
-  public StorageClusterStatusResource() throws IOException {
-    servlet = RESTServlet.getInstance();
-    cacheControl = new CacheControl();
-    cacheControl.setNoCache(true);
-    cacheControl.setNoTransform(false);
+  public StorageClusterStatusResource(User user) throws IOException {
+    this.user = user;
+    this.servlet = RESTServlet.getInstance();
+    this.cacheControl = new CacheControl();
+    this.cacheControl.setNoCache(true);
+    this.cacheControl.setNoTransform(false);
   }
 
   @GET
   @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
-  public Response get(@Context UriInfo uriInfo) {
+  public Response get(final @Context UriInfo uriInfo) throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("GET " + uriInfo.getAbsolutePath());
     }
+    if (!servlet.userRequestLimit(user, 1)) {
+      return Response.status(509).build();
+    }
     servlet.getMetrics().incrementRequests(1);
     try {
       HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterVersionResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterVersionResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterVersionResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterVersionResource.java Fri Apr  9 02:10:02 2010
@@ -53,7 +53,7 @@ public class StorageClusterVersionResour
 
   @GET
   @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
-  public Response get(@Context UriInfo uriInfo) {
+  public Response get(final @Context UriInfo uriInfo) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("GET " + uriInfo.getAbsolutePath());
     }

Modified: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/TableResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/TableResource.java?rev=932214&r1=932213&r2=932214&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/TableResource.java (original)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/TableResource.java Fri Apr  9 02:10:02 2010
@@ -1,68 +1,75 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.stargate;
-
-import java.io.IOException;
-
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
-
-import org.apache.hadoop.hbase.stargate.auth.User;
-
-public class TableResource implements Constants {
-
-  User user;
-  String table;
-
-  public TableResource(User user, String table) {
-    this.user = user;
-    this.table = table;
-  }
-
-  @Path("regions")
-  public RegionsResource getRegionsResource() throws IOException {
-    return new RegionsResource(user, table);
-  }
-
-  @Path("scanner")
-  public ScannerResource getScannerResource() throws IOException {
-    return new ScannerResource(user, table);
-  }
-
-  @Path("schema")
-  public SchemaResource getSchemaResource() throws IOException {
-    return new SchemaResource(user, table);
-  }
-
-  @Path("{rowspec: .+}")
-  public RowResource getRowResource(@PathParam("rowspec") String rowspec,
-      @QueryParam("v") String versions) {
-    try {
-      return new RowResource(user, table, rowspec, versions);
-    } catch (IOException e) {
-      throw new WebApplicationException(e, 
-                  Response.Status.INTERNAL_SERVER_ERROR);
-    }
-  }
-}
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+
+import org.apache.hadoop.hbase.stargate.User;
+
+public class TableResource implements Constants {
+
+  User user;
+  String table;
+
+  public TableResource(User user, String table) {
+    this.user = user;
+    this.table = table;
+  }
+
+  @Path("exists")
+  public ExistsResource getExistsResource() throws IOException {
+    return new ExistsResource(user, table);
+  }
+
+  @Path("regions")
+  public RegionsResource getRegionsResource() throws IOException {
+    return new RegionsResource(user, table);
+  }
+
+  @Path("scanner")
+  public ScannerResource getScannerResource() throws IOException {
+    return new ScannerResource(user, table);
+  }
+
+  @Path("schema")
+  public SchemaResource getSchemaResource() throws IOException {
+    return new SchemaResource(user, table);
+  }
+
+  @Path("{rowspec: .+}")
+  public RowResource getRowResource(
+      final @PathParam("rowspec") String rowspec,
+      final @QueryParam("v") String versions) {
+    try {
+      return new RowResource(user, table, rowspec, versions);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+}
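
As an illustration only (not part of the committed patch), the new "exists"
sub-resource wired in above could be probed with a small client such as the
one below. The host, port, and table name are placeholders, and the 200/404
response convention is an assumption, since ExistsResource itself is not
included in this hunk.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class ExistsClientSketch {
  public static void main(String[] args) throws IOException {
    URL url = new URL("http://localhost:8080/mytable/exists");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    int code = conn.getResponseCode();  // assumed: 200 if the table exists, 404 otherwise
    System.out.println("GET /mytable/exists -> " + code);
    conn.disconnect();
  }
}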

Added: hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/User.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/User.java?rev=932214&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/User.java (added)
+++ hadoop/hbase/branches/0.20_pre_durability/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/User.java Fri Apr  9 02:10:02 2010
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.security.MessageDigest;
+
+import org.apache.hadoop.hbase.util.Bytes;
+
+/** Representation of an authorized user */
+public class User implements Constants {
+
+  public static final User DEFAULT_USER = new User("default",
+    "00000000000000000000000000000000", true, true);
+
+  private String name;
+  private String token;
+  private boolean admin;
+  private boolean disabled = false;
+
+  /**
+   * Constructor
+   * <p>
+   * Creates an access token. (Normally, you don't want this.)
+   * @param name user name
+   * @param admin true if user has administrator privilege
+   * @throws Exception 
+   */
+  public User(String name, boolean admin) throws Exception {
+    this.name = name;
+    this.admin = admin;
+    byte[] digest = MessageDigest.getInstance("MD5")
+      .digest(Bytes.toBytes(name));
+    StringBuffer sb = new StringBuffer();
+    for (int i = 0; i < digest.length; i++) {
+      sb.append(Integer.toHexString(0xff & digest[i]));
+    }
+    this.token = sb.toString();
+  }
+
+  /**
+   * Constructor
+   * @param name user name
+   * @param token access token, a 32 character hex string
+   * @param admin true if user has administrator privilege
+   */
+  public User(String name, String token, boolean admin) {
+    this(name, token, admin, false);
+  }
+
+  /**
+   * Constructor
+   * @param name user name
+   * @param token access token, a 32 character hex string
+   * @param admin true if user has administrator privilege
+   * @param disabled true if user is disabled
+   */
+  public User(String name, String token, boolean admin, boolean disabled) {
+    this.name = name;
+    this.token = token;
+    this.admin = admin;
+    this.disabled = disabled;
+  }
+
+  /**
+   * @return user name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * @param name user name
+   */
+  public void setName(final String name) {
+    this.name = name;
+  }
+
+  /**
+   * @return access token, a 32 character hex string
+   */
+  public String getToken() {
+    return token;
+  }
+
+  /**
+   * @param token access token, a 32 character hex string
+   */
+  public void setToken(final String token) {
+    this.token = token;
+  }
+
+  /**
+   * @return true if user has administrator privilege
+   */
+  public boolean isAdmin() {
+    return admin;
+  }
+
+  /**
+   * @param admin true if user has administrator privilege
+   */
+  public void setAdmin(final boolean admin) {
+    this.admin = admin;
+  }
+
+  /**
+   * @return true if user is disabled
+   */
+  public boolean isDisabled() {
+    return disabled;
+  }
+
+  /**
+   * @param disabled true if user is disabled
+   */
+  public void setDisabled(boolean disabled) {
+    this.disabled = disabled;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + (admin ? 1231 : 1237);
+    result = prime * result + (disabled ? 1231 : 1237);
+    result = prime * result + ((name == null) ? 0 : name.hashCode());
+    result = prime * result + ((token == null) ? 0 : token.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    User other = (User) obj;
+    if (admin != other.admin)
+      return false;
+    if (disabled != other.disabled)
+      return false;
+    if (name == null) {
+      if (other.name != null)
+        return false;
+    } else if (!name.equals(other.name))
+      return false;
+    if (token == null) {
+      if (other.token != null)
+        return false;
+    } else if (!token.equals(other.token))
+      return false;
+    return true;
+  }
+
+}
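
For reference (outside the patch itself), the token derivation performed by
the User(String, boolean) constructor above can be reproduced in isolation as
shown below: an MD5 digest of the user name rendered as hex. Note that
Integer.toHexString() does not zero-pad, so digest bytes below 0x10 contribute
a single character; the padded variant is shown alongside for comparison.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class TokenSketch {
  public static void main(String[] args) throws NoSuchAlgorithmException {
    byte[] digest = MessageDigest.getInstance("MD5").digest("default".getBytes());
    StringBuilder unpadded = new StringBuilder();
    StringBuilder padded = new StringBuilder();
    for (byte b : digest) {
      unpadded.append(Integer.toHexString(0xff & b)); // as in the constructor
      padded.append(String.format("%02x", 0xff & b)); // always two characters
    }
    System.out.println("unpadded: " + unpadded);      // may be shorter than 32 chars
    System.out.println("padded:   " + padded);        // always 32 chars
  }
}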


