incubator-blur-commits mailing list archives

From: amccu...@apache.org
Subject: [2/2] Cleaning up a bunch of old or obsolete code.
Date: Tue, 11 Dec 2012 02:37:32 GMT
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurInputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurInputFormatTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurInputFormatTest.java
deleted file mode 100644
index 1d77c36..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurInputFormatTest.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package org.apache.blur.mapreduce.lib;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.blur.analysis.BlurAnalyzer;
-import org.apache.blur.mapreduce.BlurRecord;
-import org.apache.blur.mapreduce.lib.BlurInputFormat;
-import org.apache.blur.mapreduce.lib.BlurInputSplit;
-import org.apache.blur.store.hdfs.HdfsDirectory;
-import org.apache.blur.thrift.generated.Column;
-import org.apache.blur.thrift.generated.Record;
-import org.apache.blur.thrift.generated.Row;
-import org.apache.blur.utils.BlurConstants;
-import org.apache.blur.utils.BlurUtil;
-import org.apache.blur.utils.RowIndexWriter;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.util.Version;
-import org.junit.Before;
-import org.junit.Test;
-
-
-public abstract class BlurInputFormatTest {
-
-//  private Path indexPath = new Path(TMPDIR, "./tmp/test-indexes/newapi");
-//  private int numberOfShards = 13;
-//  private int rowsPerIndex = 10;
-//
-//  @Before
-//  public void setup() throws IOException {
-//    buildTestIndexes(indexPath, numberOfShards, rowsPerIndex);
-//  }
-//
-//  public static void buildTestIndexes(Path indexPath, int numberOfShards, int rowsPerIndex) throws IOException {
-//    Configuration configuration = new Configuration();
-//    FileSystem fileSystem = indexPath.getFileSystem(configuration);
-//    fileSystem.delete(indexPath, true);
-//    for (int i = 0; i < numberOfShards; i++) {
-//      String shardName = BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, i);
-//      buildIndex(fileSystem, configuration, new Path(indexPath, shardName), rowsPerIndex);
-//    }
-//  }
-//
-//  public static void buildIndex(FileSystem fileSystem, Configuration configuration, Path path, int rowsPerIndex) throws IOException {
-//    HdfsDirectory directory = new HdfsDirectory(path);
-//    directory.setLockFactory(NoLockFactory.getNoLockFactory());
-//    BlurAnalyzer analyzer = new BlurAnalyzer(new StandardAnalyzer(Version.LUCENE_35));
-//    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_35, analyzer);
-//    IndexWriter indexWriter = new IndexWriter(directory, conf);
-//    RowIndexWriter writer = new RowIndexWriter(indexWriter, analyzer);
-//    for (int i = 0; i < rowsPerIndex; i++) {
-//      writer.add(false, genRow());
-//    }
-//    indexWriter.close();
-//  }
-//
-//  public static Row genRow() {
-//    Row row = new Row();
-//    row.setId(UUID.randomUUID().toString());
-//    for (int i = 0; i < 10; i++) {
-//      row.addToRecords(genRecord());
-//    }
-//    return row;
-//  }
-//
-//  public static Record genRecord() {
-//    Record record = new Record();
-//    record.setRecordId(UUID.randomUUID().toString());
-//    record.setFamily("cf");
-//    record.addToColumns(new Column("name", UUID.randomUUID().toString()));
-//    return record;
-//  }
-//
-//  @Test
-//  public void testGetSplits() throws IOException, InterruptedException {
-//    BlurInputFormat format = new BlurInputFormat();
-//    Configuration conf = new Configuration();
-//    Job job = new Job(conf);
-//    FileInputFormat.addInputPath(job, indexPath);
-//    JobID jobId = new JobID();
-//    JobContext context = new JobContext(job.getConfiguration(), jobId);
-//    List<InputSplit> list = format.getSplits(context);
-//    for (int i = 0; i < list.size(); i++) {
-//      BlurInputSplit split = (BlurInputSplit) list.get(i);
-//      Path path = new Path(indexPath, BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, i));
-//      FileSystem fileSystem = path.getFileSystem(conf);
-//      assertEquals(new BlurInputSplit(fileSystem.makeQualified(path), "_0", 0, Integer.MAX_VALUE), split);
-//    }
-//  }
-//
-//  @Test
-//  public void testCreateRecordReader() throws IOException, InterruptedException {
-//    BlurInputFormat format = new BlurInputFormat();
-//    Configuration conf = new Configuration();
-//    Job job = new Job(conf);
-//    FileInputFormat.addInputPath(job, indexPath);
-//    JobID jobId = new JobID();
-//    JobContext context = new JobContext(job.getConfiguration(), jobId);
-//    List<InputSplit> list = format.getSplits(context);
-//    for (int i = 0; i < list.size(); i++) {
-//      BlurInputSplit split = (BlurInputSplit) list.get(i);
-//      TaskAttemptID taskId = new TaskAttemptID();
-//      TaskAttemptContext taskContext = new TaskAttemptContext(conf, taskId);
-//      RecordReader<Text, BlurRecord> reader = format.createRecordReader(split, taskContext);
-//      while (reader.nextKeyValue()) {
-//        System.out.println(reader.getProgress() + " " + reader.getCurrentKey() + " " + reader.getCurrentValue());
-//      }
-//    }
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurRecordWriterTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurRecordWriterTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurRecordWriterTest.java
deleted file mode 100644
index 4cd3e4c..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurRecordWriterTest.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.apache.blur.mapreduce.lib;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.UUID;
-
-import org.apache.blur.mapreduce.BlurRecord;
-import org.apache.blur.mapreduce.lib.BlurRecordWriter;
-import org.apache.blur.store.hdfs.HdfsDirectory;
-import org.apache.blur.utils.BlurConstants;
-import org.apache.blur.utils.BlurUtil;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.lucene.index.IndexReader;
-import org.junit.Test;
-
-
-public abstract class BlurRecordWriterTest {
-
-//  @Test
-//  public void testBlurRecordWriter() throws IOException, InterruptedException {
-//    JobID jobId = new JobID();
-//    TaskID tId = new TaskID(jobId, false, 13);
-//    TaskAttemptID taskId = new TaskAttemptID(tId, 0);
-//    Configuration conf = new Configuration();
-//    String pathStr = TMPDIR, "./tmp/output-record-writer-test-newapi";
-//    rm(new File(pathStr));
-//    conf.set("mapred.output.dir", pathStr);
-//    TaskAttemptContext context = new TaskAttemptContext(conf, taskId);
-//    BlurRecordWriter writer = new BlurRecordWriter(context);
-//
-//    Text key = new Text();
-//    BlurRecord value = new BlurRecord();
-//
-//    for (int i = 0; i < 10; i++) {
-//      String rowId = UUID.randomUUID().toString();
-//      key.set(rowId);
-//      value.setFamily("cf");
-//      value.setRowId(rowId);
-//      value.setRecordId(UUID.randomUUID().toString());
-//      value.addColumn("name", "value");
-//      writer.write(key, value);
-//    }
-//
-//    writer.close(context);
-//
-//    // assert index exists and has document
-//
-//    HdfsDirectory dir = new HdfsDirectory(new Path(pathStr, BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, 13)));
-//    assertTrue(IndexReader.indexExists(dir));
-//    IndexReader reader = IndexReader.open(dir);
-//    assertEquals(10, reader.numDocs());
-//  }
-//
-//  private void rm(File file) {
-//    if (file.isDirectory()) {
-//      for (File f : file.listFiles()) {
-//        rm(f);
-//      }
-//    }
-//    file.delete();
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputFormatTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputFormatTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputFormatTest.java
deleted file mode 100644
index 8e7d00f..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputFormatTest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package org.apache.blur.mr;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.blur.thrift.generated.QuerySession;
-import org.apache.blur.thrift.generated.Session;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Job;
-import org.junit.Before;
-import org.junit.Test;
-
-
-public class BlurInputFormatTest {
-  
-  private QuerySession querySession;
-  
- 
-  private Job job;
-  
-  @Before
-  public void setup() throws IOException{
-    String queryId = "BB2410XXY";
-    String sessionId = "AA0231XZ";
-    Map<String,String> properties = new HashMap<String,String>();
-    properties.put("SampleKey1", "SampleValue1");
-    properties.put("SampleKey2", "SampleValue2"); 
-    querySession = new QuerySession(new Session(sessionId, properties), queryId);
-    job = BlurInputFormat.configureJob(new Configuration(), querySession);
-  }
-  
-  @Test
-  public void testReadQuerySession(){
-    BlurInputFormat blurInputFormat = new BlurInputFormat();
-    QuerySession querySessionTest = blurInputFormat.getReadQuerySession(job);
-    assertEquals(querySession.getQueryId(),querySessionTest.getQueryId());
-    assertEquals(querySessionTest.getSession().getPropertiesSize(), querySession.getSession().getPropertiesSize());
-  }
-  
-  /*@Test
-  public void testGetSplits() throws BlurException, TException, IOException, InterruptedException{
-    String[] shardServers = {"ShardServer1"};
-    BlurInputFormat blurInputFormat = new BlurInputFormat();
-    controllerServer.setClusterStatus(clusterStatus);
-    Mockito.when(clusterStatus.getShardServerList(Mockito.anyString())).thenReturn(Arrays.asList(shardServers));
-    
-    List<InputSplit> splits = blurInputFormat.getSplits(job);
-    
-    assertTrue(splits.size() > 0);
-    assertEquals(((BlurInputSplit)splits.get(0)).getShardServerName(), "ShardServer1");
-  }*/
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputSplitTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputSplitTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputSplitTest.java
deleted file mode 100644
index e38a702..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurInputSplitTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package org.apache.blur.mr;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.blur.thrift.generated.QuerySession;
-import org.apache.blur.thrift.generated.Session;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Before;
-import org.junit.Test;
-
-public class BlurInputSplitTest {
-
-  private QuerySession querySession;
-  
-  private BlurInputSplit blurInputSplitBefore;
-  @Before
-  public void setup(){
-    String queryId = "BB2410XXY";
-    String sessionId = "AA0231XZ";
-    Map<String,String> properties = new HashMap<String,String>();
-    properties.put("SampleKey1", "SampleValue1");
-    properties.put("SampleKey2", "SampleValue2"); 
-    querySession = new QuerySession(new Session(sessionId, properties), queryId);
-    blurInputSplitBefore = new BlurInputSplit("ShardServer1",new BlurQuerySession(querySession), 0, Integer.MAX_VALUE);
-  }
- 
-  @Test
-  public void testBlurInputSplitWritable() throws IOException{
-    DataOutputBuffer dob = new DataOutputBuffer();
-    blurInputSplitBefore.write(dob);
-
-    DataInputBuffer dib = new DataInputBuffer();
-    dib.reset(dob.getData(), dob.getLength());
-    
-    BlurInputSplit blurInputSplitAfter = new BlurInputSplit();
-    blurInputSplitAfter.readFields(dib);
-    
-    assertEquals(blurInputSplitBefore.getLength(), blurInputSplitAfter.getLength());
-    assertEquals(blurInputSplitBefore.getShardServerName(), blurInputSplitAfter.getShardServerName());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurQuerySessionTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurQuerySessionTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurQuerySessionTest.java
deleted file mode 100644
index fbd0364..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurQuerySessionTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package org.apache.blur.mr;
-
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.blur.thrift.generated.QuerySession;
-import org.apache.blur.thrift.generated.Session;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Test;
-
-public class BlurQuerySessionTest {
-  
-  private BlurQuerySession blurQuerySessionBefore;
-  
-  private void createBlurQuerySessionBeforeObject(){
-    String queryId = "BB2410XXY";
-    String sessionId = "AA0231XZ";
-    Map<String,String> properties = new HashMap<String,String>();
-    properties.put("SampleKey1", "SampleValue1");
-    properties.put("SampleKey2", "SampleValue2"); 
-    blurQuerySessionBefore = new BlurQuerySession(new QuerySession(new Session(sessionId, properties), queryId));
-  }
-  
-  @Test
-  public void testBlurSessionWritable() throws IOException{
-    createBlurQuerySessionBeforeObject();
-    
-    DataOutputBuffer dob = new DataOutputBuffer();
-    blurQuerySessionBefore.write(dob);
-
-    DataInputBuffer dib = new DataInputBuffer();
-    dib.reset(dob.getData(), dob.getLength());
-    
-    BlurQuerySession blurQuerySessionAfter = new BlurQuerySession();
-    blurQuerySessionAfter.readFields(dib);
-    
-    assertEquals(blurQuerySessionBefore.getQueryId(), blurQuerySessionAfter.getQueryId());
-    assertEquals(blurQuerySessionBefore.getBlurSession().getSessionId(), blurQuerySessionAfter.getBlurSession().getSessionId());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurSessionTest.java
----------------------------------------------------------------------
diff --git a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurSessionTest.java b/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurSessionTest.java
deleted file mode 100644
index 577a9ea..0000000
--- a/src/blur-mapred/src/test/java/org/apache/blur/mr/BlurSessionTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package org.apache.blur.mr;
-
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.blur.thrift.generated.Session;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Before;
-import org.junit.Test;
-
-public class BlurSessionTest {
-  
-  private BlurSession blurSessionBefore;
-  private BlurSession blurSession;
-  
-  @Before
-  public void createBlurSessionBeforeObject(){
-    String sessionId = "AA0231XZ";
-    Map<String,String> properties = new HashMap<String,String>();
-    properties.put("SampleKey1", "SampleValue1");
-    properties.put("SampleKey2", "SampleValue2"); 
-    blurSession = new BlurSession(new Session(sessionId, properties));
-
-    Map<String,String> properties1 = new HashMap<String,String>();
-    properties.put("SampleKey3", "SampleValue3");
-    properties.put("SampleKey4", "SampleValue4"); 
-    
-    blurSessionBefore = new BlurSession(new Session(sessionId, properties1));
-  }
-  
-  @Test
-  public void testBlurSessionWritable() throws IOException{
-    
-    DataOutputBuffer dob = new DataOutputBuffer();
-    blurSessionBefore.write(dob);
-
-    DataInputBuffer dib = new DataInputBuffer();
-    dib.reset(dob.getData(), dob.getLength());
-    
-    BlurSession blurSessionAfter = new BlurSession();
-    blurSessionAfter.readFields(dib);
-    
-    assertEquals(blurSessionBefore.getSessionId(),blurSessionAfter.getSessionId());
-    assertEquals(blurSessionBefore.getProperties().size(), blurSessionAfter.getProperties().size());
-    assertEquals(blurSessionBefore.getProperties().get("SampleKey1"), blurSessionAfter.getProperties().get("SampleKey1"));
-  }
-  
-  @Test
-  public void testEquals(){
-    assertTrue(blurSession.equals(blurSession));
-    assertFalse(blurSession.equals(blurSessionBefore));
-    
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-new-api-prototype/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-new-api-prototype/pom.xml b/src/blur-new-api-prototype/pom.xml
deleted file mode 100644
index 9b18606..0000000
--- a/src/blur-new-api-prototype/pom.xml
+++ /dev/null
@@ -1,160 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-	license agreements. See the NOTICE file distributed with this work for additional 
-	information regarding copyright ownership. The ASF licenses this file to 
-	you under the Apache License, Version 2.0 (the "License"); you may not use 
-	this file except in compliance with the License. You may obtain a copy of 
-	the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-	by applicable law or agreed to in writing, software distributed under the 
-	License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-	OF ANY KIND, either express or implied. See the License for the specific 
-	language governing permissions and limitations under the License. -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.apache.blur</groupId>
-	<artifactId>blur-new-api-prototype</artifactId>
-	<packaging>jar</packaging>
-	<name>Blur New API Prototype</name>
-	<version>0.2.0-SNAPSHOT</version>
-
-	<dependencies>
-		<dependency>
-			<groupId>com.googlecode.concurrentlinkedhashmap</groupId>
-			<artifactId>concurrentlinkedhashmap-lru</artifactId>
-			<version>1.3.1</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-core</artifactId>
-			<version>4.0.0</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-codecs</artifactId>
-			<version>4.0.0</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-analyzers-common</artifactId>
-			<version>4.0.0</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-queryparser</artifactId>
-			<version>4.0.0</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>1.0.3</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.thrift</groupId>
-			<artifactId>libthrift</artifactId>
-			<version>0.7.0</version>
-		</dependency>
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<version>4.7</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-api</artifactId>
-			<version>1.6.1</version>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-log4j12</artifactId>
-			<version>1.6.1</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-			<version>1.2.15</version>
-			<scope>provided</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>javax.mail</groupId>
-					<artifactId>mail</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>javax.jms</groupId>
-					<artifactId>jms</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.sun.jdmk</groupId>
-					<artifactId>jmxtools</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.sun.jmx</groupId>
-					<artifactId>jmxri</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-	</dependencies>
-
-	<build>
-		<pluginManagement>
-			<plugins>
-
-				<plugin>
-					<groupId>org.apache.maven.plugins</groupId>
-					<artifactId>maven-compiler-plugin</artifactId>
-					<configuration>
-						<source>1.6</source>
-						<target>1.6</target>
-					</configuration>
-				</plugin>
-				<plugin>
-					<groupId>org.codehaus.mojo</groupId>
-					<artifactId>exec-maven-plugin</artifactId>
-				</plugin>
-				<plugin>
-					<groupId>org.apache.maven.plugins</groupId>
-					<artifactId>maven-dependency-plugin</artifactId>
-					<executions>
-						<execution>
-							<id>copy-dependencies</id>
-							<phase>package</phase>
-							<goals>
-								<goal>copy-dependencies</goal>
-							</goals>
-							<configuration>
-								<outputDirectory>${project.build.directory}/../../../lib
-								</outputDirectory>
-								<overWriteReleases>false</overWriteReleases>
-								<overWriteSnapshots>false</overWriteSnapshots>
-								<overWriteIfNewer>true</overWriteIfNewer>
-								<excludeTransitive>true</excludeTransitive>
-								<excludeArtifactIds>junit,commons-cli,commons-logging,hadoop-core,slf4j-api,slf4j-log4j12</excludeArtifactIds>
-							</configuration>
-						</execution>
-					</executions>
-				</plugin>
-				<plugin>
-					<artifactId>maven-assembly-plugin</artifactId>
-					<configuration>
-						<descriptorRefs>
-							<descriptorRef>jar-with-dependencies</descriptorRef>
-						</descriptorRefs>
-					</configuration>
-
-					<executions>
-						<execution>
-							<id>make-assembly</id>
-							<phase>package</phase>
-							<goals>
-								<goal>attached</goal>
-							</goals>
-						</execution>
-					</executions>
-				</plugin>
-			</plugins>
-		</pluginManagement>
-	</build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlException.java
----------------------------------------------------------------------
diff --git a/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlException.java b/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlException.java
deleted file mode 100644
index 9090dcf..0000000
--- a/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package org.apache.blur.parser;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-public class BqlException extends Exception {
-
-  private static final long serialVersionUID = 5871985403028318296L;
-
-  public BqlException(String message) {
-    super(message);
-  }
-
-  public BqlException(Exception e) {
-    super(e);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlParser.java
----------------------------------------------------------------------
diff --git a/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlParser.java b/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlParser.java
deleted file mode 100644
index 3c9ab92..0000000
--- a/src/blur-new-api-prototype/src/main/java/org/apache/blur/parser/BqlParser.java
+++ /dev/null
@@ -1,259 +0,0 @@
-package org.apache.blur.parser;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.StringTokenizer;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Version;
-
-public class BqlParser {
-
-  private static final String LIMIT = "limit";
-  private static final String BY = "by";
-  private static final String ORDER = "order";
-  private static final String WHERE = "where";
-  private static final String FROM = "from";
-  private static final String SELECT = "select";
-  private static final String AS = "as";
-  private static final String ASC = "asc";
-  private static final String DESC = "desc";
-  private static final Collection<String> STOP_WORDS = new HashSet<String>(Arrays.asList(ORDER, WHERE, FROM, SELECT, LIMIT));
-
-  public enum QueryType {
-    SELECT
-  }
-
-  public enum OrderBy {
-    ASC, DESC
-  }
-
-  private List<String> attributeTokens = new ArrayList<String>();
-  private List<String> attributeTokenAliases = new ArrayList<String>();
-  private QueryType type;
-  private String tableName;
-  private String tableNameAlias;
-  private Analyzer analyzer;
-  private Query luceneQuery;
-  private List<String> orderByAttributes = new ArrayList<String>();
-  private List<OrderBy> orderByAttributesDirection = new ArrayList<OrderBy>();
-  private long limit = Long.MAX_VALUE;
-
-  public long getLimit() {
-    return limit;
-  }
-
-  public BqlParser(Analyzer analyzer) {
-    this.analyzer = analyzer;
-  }
-
-  public void parse(String queryString) throws BqlException {
-    int end = queryString.indexOf(';');
-    if (end < 0) {
-      end = queryString.length();
-    }
-    String lowerCase = queryString.substring(0, end).toLowerCase();
-    if (parseSelect(lowerCase) && parseFrom(lowerCase) && parseWhere(lowerCase) && parseOrderBy(lowerCase) && parserLimit(lowerCase)) {
-      type = QueryType.SELECT;
-      return;
-    }
-    throw new RuntimeException("Not supported [" + queryString + "]");
-  }
-
-  private boolean parserLimit(String query) throws BqlException {
-    StringTokenizer tokenizer = new StringTokenizer(query, " ,");
-    boolean limitTokenFound = false;
-    while (tokenizer.hasMoreTokens()) {
-      String token = tokenizer.nextToken();
-      if (!limitTokenFound && token.equals(LIMIT)) {
-        limitTokenFound = true;
-      } else if (limitTokenFound) {
-        if (STOP_WORDS.contains(token)) {
-          return true;
-        }
-        limit = Long.parseLong(token);
-        return true;
-      }
-    }
-    return true;
-  }
-
-  private boolean parseOrderBy(String query) throws BqlException {
-    StringTokenizer tokenizer = new StringTokenizer(query, " ,");
-    boolean orderByTokenFound = false;
-    while (tokenizer.hasMoreTokens()) {
-      String token = tokenizer.nextToken();
-      if (!orderByTokenFound && token.equals(ORDER)) {
-        orderByTokenFound = true;
-      } else if (orderByTokenFound) {
-        if (STOP_WORDS.contains(token)) {
-          return true;
-        }
-        if (token.equals(BY)) {
-          continue;
-        } else if (token.equals(ASC)) {
-          orderByAttributesDirection.add(OrderBy.ASC);
-        } else if (token.equals(DESC)) {
-          orderByAttributesDirection.add(OrderBy.DESC);
-        } else {
-          orderByAttributes.add(token);
-          addDefaultAsc();
-        }
-      }
-    }
-    return true;
-  }
-
-  private void addDefaultAsc() {
-    if (orderByAttributesDirection.size() < orderByAttributes.size()) {
-      orderByAttributesDirection.add(OrderBy.ASC);
-    }
-  }
-
-  private boolean parseWhere(String query) throws BqlException {
-    int index = query.indexOf(WHERE);
-    if (index < 0) {
-      return true;
-    }
-    int fromIndex = notToMaxInt(query.indexOf(FROM, index));
-    int selectIndex = notToMaxInt(query.indexOf(SELECT, index));
-    int orderIndex = notToMaxInt(query.indexOf(ORDER));
-    int endingIndex = Math.min(fromIndex, Math.min(selectIndex, orderIndex));
-    if (endingIndex < 0 || endingIndex == Integer.MAX_VALUE) {
-      luceneQuery = luceneParse(query.substring(index + WHERE.length()));
-    } else {
-      luceneQuery = luceneParse(query.substring(index + WHERE.length(), endingIndex));
-    }
-    return true;
-  }
-
-  private int notToMaxInt(int i) {
-    if (i < 0) {
-      return Integer.MAX_VALUE;
-    }
-    return i;
-  }
-
-  private Query luceneParse(String luceneQuery) throws BqlException {
-    QueryParser parser = new QueryParser(Version.LUCENE_40, "default", analyzer);
-    try {
-      return parser.parse(luceneQuery);
-    } catch (ParseException e) {
-      throw new BqlException(e);
-    }
-  }
-
-  private boolean parseFrom(String query) throws BqlException {
-    StringTokenizer tokenizer = new StringTokenizer(query, " ,");
-    boolean fromTokenFound = false;
-    while (tokenizer.hasMoreTokens()) {
-      String token = tokenizer.nextToken();
-      if (!fromTokenFound && token.equals(FROM)) {
-        fromTokenFound = true;
-      } else if (fromTokenFound) {
-        if (STOP_WORDS.contains(token)) {
-          return true;
-        }
-        if (tableName == null) {
-          tableName = token;
-        } else if (token.equals(AS)) {
-          tableNameAlias = tokenizer.nextToken();
-        } else {
-          throw new BqlException("Table [" + tableName + "] already set, only a single table is allowed.");
-        }
-
-      }
-    }
-    return fromTokenFound;
-  }
-
-  private boolean parseSelect(String query) {
-    StringTokenizer tokenizer = new StringTokenizer(query, " ,");
-    boolean selectTokenFound = false;
-    while (tokenizer.hasMoreTokens()) {
-      String token = tokenizer.nextToken();
-      if (!selectTokenFound && token.equals(SELECT)) {
-        selectTokenFound = true;
-      } else if (selectTokenFound) {
-        if (STOP_WORDS.contains(token)) {
-          addNullAlias();
-          return true;
-        }
-        if (token.equals(AS)) {
-          attributeTokenAliases.add(tokenizer.nextToken());
-        } else {
-          addNullAlias();
-          attributeTokens.add(token);
-        }
-      }
-    }
-    addNullAlias();
-    return selectTokenFound;
-  }
-
-  private void addNullAlias() {
-    if (attributeTokenAliases.size() != attributeTokens.size()) {
-      attributeTokenAliases.add(null);
-    }
-  }
-
-  public QueryType getQueryType() {
-    return type;
-  }
-
-  public String getTableName() {
-    return tableName;
-  }
-
-  public String getTableNameAlias() {
-    return tableNameAlias;
-  }
-
-  public List<String> getAttributeTokens() {
-    return attributeTokens;
-  }
-
-  public List<String> getAttributeTokenAliases() {
-    return attributeTokenAliases;
-  }
-
-  public Query getLuceneQuery() {
-    return luceneQuery;
-  }
-
-  public Analyzer getAnalyzer() {
-    return analyzer;
-  }
-
-  public List<String> getOrderByAttributes() {
-    return orderByAttributes;
-  }
-
-  public List<OrderBy> getOrderByAttributesDirection() {
-    return orderByAttributesDirection;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-new-api-prototype/src/test/java/org/apache/blur/parser/BqlParserTest.java
----------------------------------------------------------------------
diff --git a/src/blur-new-api-prototype/src/test/java/org/apache/blur/parser/BqlParserTest.java b/src/blur-new-api-prototype/src/test/java/org/apache/blur/parser/BqlParserTest.java
deleted file mode 100644
index febed3a..0000000
--- a/src/blur-new-api-prototype/src/test/java/org/apache/blur/parser/BqlParserTest.java
+++ /dev/null
@@ -1,194 +0,0 @@
-package org.apache.blur.parser;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import static org.junit.Assert.*;
-
-import java.util.Arrays;
-
-import org.apache.blur.parser.BqlParser.OrderBy;
-import org.apache.blur.parser.BqlParser.QueryType;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Version;
-import org.junit.Test;
-
-public class BqlParserTest {
-
-  @Test
-  public void testBqlSimpleSelect() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select * from tablename");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("*"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList((String) null), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertNull(parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlSimpleSelectWithTableAlias() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select * from tablename as t1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("*"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList((String) null), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlSelectSingleAttributeWithTableAlias() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1 from tablename as t1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList((String) null), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithTableAlias() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1,col2 from tablename as t1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, null), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceWithTableAlias() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 from tablename as t1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, null), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAlias() throws BqlException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertNull(parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAliasWithWhere() throws BqlException, ParseException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1 where col2:val1 col3:val2");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertEquals(luceneParse(parser.getAnalyzer(), "col2:val1 col3:val2"), parser.getLuceneQuery());
-    assertTrue(parser.getOrderByAttributes().isEmpty());
-    assertTrue(parser.getOrderByAttributesDirection().isEmpty());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAliasWithWhereWithOrderBy() throws BqlException, ParseException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1 where col2:val1 col3:val2 order by col1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertEquals(luceneParse(parser.getAnalyzer(), "col2:val1 col3:val2"), parser.getLuceneQuery());
-    assertEquals(Arrays.asList("col1"), parser.getOrderByAttributes());
-    assertEquals(Arrays.asList(OrderBy.ASC), parser.getOrderByAttributesDirection());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAliasWithWhereWithOrderByAsc() throws BqlException, ParseException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1 where col2:val1 col3:val2 order by asc col1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertEquals(luceneParse(parser.getAnalyzer(), "col2:val1 col3:val2"), parser.getLuceneQuery());
-    assertEquals(Arrays.asList("col1"), parser.getOrderByAttributes());
-    assertEquals(Arrays.asList(OrderBy.ASC), parser.getOrderByAttributesDirection());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAliasWithWhereWithOrderByDesc() throws BqlException, ParseException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1 where col2:val1 col3:val2 order by desc col1");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertEquals(luceneParse(parser.getAnalyzer(), "col2:val1 col3:val2"), parser.getLuceneQuery());
-    assertEquals(Arrays.asList("col1"), parser.getOrderByAttributes());
-    assertEquals(Arrays.asList(OrderBy.DESC), parser.getOrderByAttributesDirection());
-  }
-
-  @Test
-  public void testBqlTwoAttributeWithSpaceAndAliasWithTableAliasWithWhereWithOrderByMulti() throws BqlException, ParseException {
-    BqlParser parser = new BqlParser(new StandardAnalyzer(Version.LUCENE_40));
-    parser.parse("select col1, col2 as c2 from tablename as t1 where col2:val1 col3:val2 order by col1, desc col2");
-    assertEquals(QueryType.SELECT, parser.getQueryType());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getAttributeTokens());
-    assertEquals(Arrays.asList(null, "c2"), parser.getAttributeTokenAliases());
-    assertEquals("tablename", parser.getTableName());
-    assertEquals("t1", parser.getTableNameAlias());
-    assertEquals(luceneParse(parser.getAnalyzer(), "col2:val1 col3:val2"), parser.getLuceneQuery());
-    assertEquals(Arrays.asList("col1", "col2"), parser.getOrderByAttributes());
-    assertEquals(Arrays.asList(OrderBy.ASC, OrderBy.DESC), parser.getOrderByAttributesDirection());
-  }
-  
-  private Query luceneParse(Analyzer analyzer, String luceneQuery) throws ParseException {
-    QueryParser parser = new QueryParser(Version.LUCENE_40, "default", analyzer);
-    return parser.parse(luceneQuery);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/AddDocumentsLuceneApiTable.java
----------------------------------------------------------------------
diff --git a/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/AddDocumentsLuceneApiTable.java b/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/AddDocumentsLuceneApiTable.java
deleted file mode 100644
index ae9c71b..0000000
--- a/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/AddDocumentsLuceneApiTable.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package org.apache.blur.testsuite;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.blur.thrift.generated.BlurException;
-import org.apache.blur.thrift.generated.Document;
-import org.apache.blur.thrift.generated.Field;
-import org.apache.blur.thrift.generated.Blur.Client;
-import org.apache.blur.thrift.generated.MutateOptions;
-import org.apache.blur.thrift.generated.TYPE;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TSocket;
-
-public class AddDocumentsLuceneApiTable {
-
-  public static void main(String[] args) throws BlurException, TException, IOException, ParseException {
-    TSocket trans;
-    Socket socket = new Socket();
-    socket.setTcpNoDelay(true);
-    socket.connect(new InetSocketAddress("127.0.0.1", 40020));
-    trans = new TSocket(socket);
-
-    TProtocol proto = new TBinaryProtocol(new TFramedTransport(trans));
-    Client client = new Client(proto);
-    int batch = 100;
-    List<Document> docs = new ArrayList<Document>();
-    long start = System.nanoTime();
-    int total = 0;
-    MutateOptions options = new MutateOptions();
-    options.setTable("test_table");
-    options.setShardIndex(0);
-    while (true) {
-      long now = System.nanoTime();
-      if (start + 5000000000l < now) {
-        System.out.println(total);
-        start = System.nanoTime();
-      }
-      Document doc = genDoc();
-      docs.add(doc);
-      if (docs.size() >= batch) {
-        client.addDocuments(options, docs);
-        docs.clear();
-        break;
-      }
-      total++;
-      
-    }
-
-  }
-
-  private static Document genDoc() {
-    Document document = new Document();
-    Field field = new Field();
-    field.setName("test");
-    field.setType(TYPE.STRING);
-    field.setValue(new String(UUID.randomUUID().toString()).getBytes());
-    document.addToFields(field);
-    return document;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/dbb5df6d/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/SearchLuceneApiTable.java
----------------------------------------------------------------------
diff --git a/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/SearchLuceneApiTable.java b/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/SearchLuceneApiTable.java
deleted file mode 100644
index 980394e..0000000
--- a/src/blur-testsuite/src/main/java/org/apache/blur/testsuite/SearchLuceneApiTable.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package org.apache.blur.testsuite;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.util.List;
-
-import org.apache.blur.lucene.serializer.QueryWritable;
-import org.apache.blur.thrift.generated.Blur.Client;
-import org.apache.blur.thrift.generated.BlurException;
-import org.apache.blur.thrift.generated.Document;
-import org.apache.blur.thrift.generated.QueryArgs;
-import org.apache.blur.thrift.generated.Session;
-import org.apache.blur.thrift.generated.TopFieldDocs;
-import org.apache.blur.utils.ThriftLuceneConversion;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Version;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TSocket;
-
-public class SearchLuceneApiTable {
-
-  public static void main(String[] args) throws BlurException, TException, IOException, ParseException {
-    TSocket trans;
-    Socket socket = new Socket();
-    socket.setTcpNoDelay(true);
-    socket.connect(new InetSocketAddress("127.0.0.1", 40020));
-    trans = new TSocket(socket);
-
-    TProtocol proto = new TBinaryProtocol(new TFramedTransport(trans));
-    Client client = new Client(proto);
-
-    // List<String> terms = client.terms("test_table", "fam0", "col0", "",
-    // (short) 10);
-    // System.out.println(terms);
-
-    QueryParser parser = new QueryParser(Version.LUCENE_40, "", new StandardAnalyzer(Version.LUCENE_40));
-
-    Query query = parser.parse("fam0.col0:abdomen");
-
-    QueryWritable queryWritable = new QueryWritable(query);
-    DataOutputBuffer buffer = new DataOutputBuffer();
-    queryWritable.write(buffer);
-    buffer.close();
-
-    Session session = client.openReadSession("test_table");
-    int total = 1000;
-    long totalTime = 0;
-    long totalTimeM = 0;
-    for (int i = 0; i < total; i++) {
-      QueryArgs queryArgs = new QueryArgs();
-      queryArgs.setNumberToFetch(10);
-      queryArgs.setQuery(trim(buffer));
-
-      long sm = System.currentTimeMillis();
-      long s = System.nanoTime();
-      List<TopFieldDocs> topDocs = client.search(session, queryArgs);
-      long e = System.nanoTime();
-      long em = System.currentTimeMillis();
-      totalTime += (e - s);
-      totalTimeM += (em - sm);
-      System.out.println("================");
-      for (TopFieldDocs topFieldDocs : topDocs) {
-        System.out.println(topFieldDocs);
-        List<Document> docs = client.doc(session, ThriftLuceneConversion.toThrift(topFieldDocs.getScoreDocs()), null);
-        for (Document document : docs) {
-          System.out.println(ThriftLuceneConversion.toString(document));
-        }
-      }
-
-    }
-    System.out.println(totalTime / total / 1000000.0);
-    System.out.println(totalTimeM / total);
-    client.closeReadSession(session);
-  }
-
-  private static byte[] trim(DataOutputBuffer buffer) {
-    byte[] buf = new byte[buffer.getLength()];
-    System.arraycopy(buffer.getData(), 0, buf, 0, buf.length);
-    return buf;
-  }
-}

