accumulo-dev mailing list archives

From keith-turner <...@git.apache.org>
Subject [GitHub] accumulo pull request: Implementation of SeekingFilter, CfCqSliceF...
Date Fri, 14 Aug 2015 13:08:10 GMT
Github user keith-turner commented on a diff in the pull request:

    https://github.com/apache/accumulo/pull/42#discussion_r37074032
  
    --- Diff: test/src/test/java/org/apache/accumulo/core/iterators/user/TestCfCqSlice.java ---
    @@ -0,0 +1,378 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.accumulo.core.iterators.user;
    +
    +import org.apache.accumulo.core.client.AccumuloException;
    +import org.apache.accumulo.core.client.AccumuloSecurityException;
    +import org.apache.accumulo.core.client.BatchScanner;
    +import org.apache.accumulo.core.client.BatchWriter;
    +import org.apache.accumulo.core.client.BatchWriterConfig;
    +import org.apache.accumulo.core.client.Connector;
    +import org.apache.accumulo.core.client.IteratorSetting;
    +import org.apache.accumulo.core.client.lexicoder.Lexicoder;
    +import org.apache.accumulo.core.data.Key;
    +import org.apache.accumulo.core.data.Mutation;
    +import org.apache.accumulo.core.data.Range;
    +import org.apache.accumulo.core.data.Value;
    +import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
    +import org.apache.accumulo.core.iterators.ValueFormatException;
    +import org.apache.accumulo.core.security.Authorizations;
    +import org.apache.accumulo.minicluster.MiniAccumuloCluster;
    +import org.apache.accumulo.minicluster.MiniAccumuloConfig;
    +import org.apache.hadoop.io.Text;
    +import org.junit.AfterClass;
    +import org.junit.BeforeClass;
    +import org.junit.Test;
    +
    +import java.nio.file.Files;
    +import java.nio.file.Path;
    +import java.util.Collection;
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.LinkedList;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.SortedSet;
    +import java.util.TreeSet;
    +import java.util.concurrent.atomic.AtomicLong;
    +
    +import static java.nio.charset.StandardCharsets.UTF_8;
    +import static org.junit.Assert.assertTrue;
    +import static org.junit.Assert.assertFalse;
    +
    +public abstract class TestCfCqSlice {
    +
    +  private static final String TABLE_NAME = "TestColumnSliceFilter";
    +  private static final Collection<Range> INFINITY = Collections.singletonList(new Range());
    +  private static final Lexicoder<Long> LONG_LEX = new ReadableLongLexicoder(4);
    +  private static final AtomicLong ROW_ID_GEN = new AtomicLong();
    +
    +  private static final boolean easyThereSparky = false;
    +  private static final int LR_DIM = easyThereSparky ? 5 : 50;
    +
    +  private static MiniAccumuloCluster mac;
    +
    +  protected abstract Class<? extends SortedKeyValueIterator<Key,Value>> getFilterClass();
    +
    +  @BeforeClass
    +  public static void setupMAC() throws Exception {
    +    Path macPath = Files.createTempDirectory("mac");
    +    System.out.println("MAC running at " + macPath);
    +    MiniAccumuloConfig macCfg = new MiniAccumuloConfig(macPath.toFile(), "password");
    +    macCfg.setNumTservers(easyThereSparky ? 1 : 4);
    +    mac = new MiniAccumuloCluster(macCfg);
    +    mac.start();
    +    Collection<Mutation> largeRows = createMutations(LR_DIM, LR_DIM, LR_DIM);
    +    Connector conn = newConnector();
    +    conn.tableOperations().create(TABLE_NAME);
    +    if (!easyThereSparky) {
    +      SortedSet<Text> largeRowSplits = getSplits(0, LR_DIM - 1, 5);
    +      conn.tableOperations().addSplits(TABLE_NAME, largeRowSplits);
    +    }
    +    BatchWriter bw = conn.createBatchWriter(TABLE_NAME, new BatchWriterConfig());
    +    bw.addMutations(largeRows);
    +    bw.flush();
    +  }
    +
    +  private static Connector newConnector() throws AccumuloException, AccumuloSecurityException {
    +    return mac.getConnector("root", "password");
    +  }
    +
    +  @AfterClass
    +  public static void tearDownMAC() throws Exception {
    +    mac.stop();
    +  }
    +
    +  @Test
    +  public void testAllRowsFullSlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass()));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testSingleRowFullSlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass()));
    +    int rowId = LR_DIM / 2;
    +    bs.setRanges(Collections.singletonList(Range.exact(new Text(LONG_LEX.encode((long) rowId)))));
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (rowId == i) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testAllRowsSlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    long sliceMinCf = 20;
    +    long sliceMinCq = 30;
    +    long sliceMaxCf = 25;
    +    long sliceMaxCq = 35;
    +    assertTrue("slice param must be less than LR_DIM", sliceMinCf < LR_DIM);
    +    assertTrue("slice param must be less than LR_DIM", sliceMinCq < LR_DIM);
    +    assertTrue("slice param must be less than LR_DIM", sliceMaxCf < LR_DIM);
    +    assertTrue("slice param must be less than LR_DIM", sliceMaxCq < LR_DIM);
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CF, new String(LONG_LEX.encode(sliceMinCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MIN_CQ, new String(LONG_LEX.encode(sliceMinCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CF, new String(LONG_LEX.encode(sliceMaxCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CQ, new String(LONG_LEX.encode(sliceMaxCq), UTF_8));
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (j >= sliceMinCf && j <= sliceMaxCf && k >= sliceMinCq && k <= sliceMaxCq) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testSingleColumnSlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    long sliceMinCf = 20;
    +    long sliceMinCq = 20;
    +    long sliceMaxCf = 20;
    +    long sliceMaxCq = 20;
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CF, new String(LONG_LEX.encode(sliceMinCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MIN_CQ, new String(LONG_LEX.encode(sliceMinCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CF, new String(LONG_LEX.encode(sliceMaxCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CQ, new String(LONG_LEX.encode(sliceMaxCq), UTF_8));
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (j == sliceMinCf && k == sliceMinCq) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testSingleColumnSliceByExclude() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    long sliceMinCf = 20;
    +    long sliceMinCq = 20;
    +    long sliceMaxCf = 22;
    +    long sliceMaxCq = 22;
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CF, new String(LONG_LEX.encode(sliceMinCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MIN_CQ, new String(LONG_LEX.encode(sliceMinCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CF, new String(LONG_LEX.encode(sliceMaxCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CQ, new String(LONG_LEX.encode(sliceMaxCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_INCLUSIVE, "false");
    +    opts.put(CfCqSliceOpts.OPT_MIN_INCLUSIVE, "false");
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (j == 21 && k == 21) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testAllCfsCqSlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    long sliceMinCq = 10;
    +    long sliceMaxCq = 30;
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CQ, new String(LONG_LEX.encode(sliceMinCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CQ, new String(LONG_LEX.encode(sliceMaxCq), UTF_8));
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (k >= sliceMinCq && k <= sliceMaxCq) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testSliceCfsAllCqs() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    long sliceMinCf = 10;
    +    long sliceMaxCf = 30;
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CF, new String(LONG_LEX.encode(sliceMinCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CF, new String(LONG_LEX.encode(sliceMaxCf), UTF_8));
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          if (j >= sliceMinCf && j <= sliceMaxCf) {
    +            assertTrue("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must be found in scan", foundKvs[i][j][k]);
    +          } else {
    +            assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +          }
    +        }
    +      }
    +    }
    +  }
    +
    +  @Test
    +  public void testEmptySlice() throws Exception {
    +    boolean[][][] foundKvs = new boolean[LR_DIM][LR_DIM][LR_DIM];
    +    // TODO: test with a batch scanner
    +    BatchScanner bs = newConnector().createBatchScanner(TABLE_NAME, new Authorizations(), 5);
    +    long sliceMinCf = LR_DIM + 1;
    +    long sliceMinCq = LR_DIM + 1;
    +    long sliceMaxCf = LR_DIM + 1;
    +    long sliceMaxCq = LR_DIM + 1;
    +    Map<String,String> opts = new HashMap<String,String>();
    +    opts.put(CfCqSliceOpts.OPT_MIN_CF, new String(LONG_LEX.encode(sliceMinCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MIN_CQ, new String(LONG_LEX.encode(sliceMinCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CF, new String(LONG_LEX.encode(sliceMaxCf), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_CQ, new String(LONG_LEX.encode(sliceMaxCq), UTF_8));
    +    opts.put(CfCqSliceOpts.OPT_MAX_INCLUSIVE, "false");
    +    opts.put(CfCqSliceOpts.OPT_MIN_INCLUSIVE, "false");
    +    bs.addScanIterator(new IteratorSetting(50, getFilterClass().getName(), getFilterClass(), opts));
    +    bs.setRanges(INFINITY);
    +    loadKvs(foundKvs, bs);
    +    for (int i = 0; i < LR_DIM; i++) {
    +      for (int j = 0; j < LR_DIM; j++) {
    +        for (int k = 0; k < LR_DIM; k++) {
    +          assertFalse("(r, cf, cq) == (" + i + ", " + j + ", " + k + ") must not be found in scan", foundKvs[i][j][k]);
    +        }
    +      }
    +    }
    +  }
    +
    +  private void loadKvs(boolean[][][] foundKvs, BatchScanner bs) {
    +    try {
    +      for (Map.Entry<Key,Value> kvPair : bs) {
    +        Key k = kvPair.getKey();
    --- End diff ---
    
    Could validate the value here.
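    
    A rough sketch of what that check could look like, assuming the value written by
    createMutations() can be recomputed from the key (that helper is not shown in this
    hunk, so expectedValueFor() below is a hypothetical placeholder for whatever encoding
    the test data actually uses; it would also need a java.util.Arrays import):
    
        // inside the loop in loadKvs(), alongside the existing key handling
        Value v = kvPair.getValue();
        // hypothetical helper: rebuild the bytes createMutations() would have written for this key
        byte[] expected = expectedValueFor(k);
        assertTrue("unexpected value for key " + k, Arrays.equals(expected, v.get()));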

