incubator-blur-commits mailing list archives

From amccu...@apache.org
Subject [24/28] Initial commit of the back port. The blur-util and blur-store modules have been completed. A new distribution project helps with the building of the project, and all of the pom files have been updated to the new version. This is very much a work i
Date Mon, 18 Mar 2013 01:10:31 GMT
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
index adc998b..f2ef236 100644
--- a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
+++ b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
@@ -24,10 +24,12 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.Random;
 
 import org.apache.blur.store.blockcache.BlockDirectory;
 import org.apache.blur.store.blockcache.Cache;
+import org.apache.blur.store.buffer.BufferStore;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.junit.Before;
@@ -36,67 +38,84 @@ import org.junit.Test;
 import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
 
 public class BlockDirectoryTest {
+  private static final File TMPDIR = new File(System.getProperty("blur.tmp.dir", "/tmp"));
+
+  private class MapperCache implements Cache {
+    public Map<String, byte[]> map = new ConcurrentLinkedHashMap.Builder<String, byte[]>().maximumWeightedCapacity(8).build();
+
+    @Override
+    public void update(String name, long blockId, int blockOffset, byte[] buffer, int offset, int length) {
+      byte[] cached = map.get(name + blockId);
+      if (cached != null) {
+        int newlen = Math.max(cached.length, blockOffset + length);
+        byte[] b = new byte[newlen];
+        System.arraycopy(cached, 0, b, 0, cached.length);
+        System.arraycopy(buffer, offset, b, blockOffset, length);
+        cached = b;
+      } else {
+        cached = copy(blockOffset, buffer, offset, length);
+      }
+      map.put(name + blockId, cached);
+    }
+
+    private byte[] copy(int blockOffset, byte[] buffer, int offset, int length) {
+      byte[] b = new byte[length + blockOffset];
+      System.arraycopy(buffer, offset, b, blockOffset, length);
+      return b;
+    }
+
+    @Override
+    public boolean fetch(String name, long blockId, int blockOffset, byte[] b, int off, int lengthToReadInBlock) {
+      // return false;
+      byte[] data = map.get(name + blockId);
+      if (data == null) {
+        return false;
+      }
+      System.arraycopy(data, blockOffset, b, off, lengthToReadInBlock);
+      return true;
+    }
+
+    @Override
+    public void delete(String name) {
+
+    }
+
+    @Override
+    public long size() {
+      return map.size();
+    }
+
+    @Override
+    public void renameCacheFile(String source, String dest) {
+    }
+  }
 
   private static final int MAX_NUMBER_OF_WRITES = 10000;
   private static final int MIN_FILE_SIZE = 100;
   private static final int MAX_FILE_SIZE = 100000;
   private static final int MIN_BUFFER_SIZE = 1;
-  private static final int MAX_BUFFER_SIZE = 5000;
+  private static final int MAX_BUFFER_SIZE = 12000;
   private static final int MAX_NUMBER_OF_READS = 20000;
   private Directory directory;
   private File file;
   private long seed;
   private Random random;
-
+  private MapperCache mapperCache;
+  
   @Before
   public void setUp() throws IOException {
-    file = new File("./tmp");
+    BufferStore.init(128, 128);
+    file = new File(TMPDIR, "blockdirectorytest");
     rm(file);
     file.mkdirs();
     FSDirectory dir = FSDirectory.open(new File(file, "base"));
-    directory = new BlockDirectory("test", dir, getBasicCache());
+    mapperCache = new MapperCache();
+    directory = new BlockDirectory("test", dir, mapperCache);
     seed = new Random().nextLong();
+    System.out.println("Seed is " + seed);
     random = new Random(seed);
   }
 
-  private Cache getBasicCache() {
-    return new Cache() {
-      private Map<String, byte[]> map = new ConcurrentLinkedHashMap.Builder<String, byte[]>().maximumWeightedCapacity(8).build();
-
-      @Override
-      public void update(String name, long blockId, byte[] buffer) {
-        map.put(name + blockId, copy(buffer));
-      }
-
-      private byte[] copy(byte[] buffer) {
-        byte[] b = new byte[buffer.length];
-        System.arraycopy(buffer, 0, b, 0, buffer.length);
-        return b;
-      }
-
-      @Override
-      public boolean fetch(String name, long blockId, int blockOffset, byte[] b, int off, int lengthToReadInBlock) {
-        // return false;
-        byte[] data = map.get(name + blockId);
-        if (data == null) {
-          return false;
-        }
-        System.arraycopy(data, blockOffset, b, off, lengthToReadInBlock);
-        return true;
-      }
-
-      @Override
-      public void delete(String name) {
-
-      }
-
-      @Override
-      public long size() {
-        return map.size();
-      }
-    };
-  }
-
   @Test
   public void testEOF() throws IOException {
     Directory fsDir = FSDirectory.open(new File(file, "normal"));
@@ -110,7 +129,7 @@ public class BlockDirectoryTest {
   }
 
   private void testEof(String name, Directory directory, long length) throws IOException {
-    IndexInput input = directory.openInput(name);
+    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
     input.seek(length);
     try {
       input.readByte();
@@ -121,6 +140,8 @@
 
   @Test
   public void testRandomAccessWrites() throws IOException {
+    long t1 = System.nanoTime();
+
     int i = 0;
     try {
       for (; i < 10; i++) {
@@ -133,13 +154,20 @@
       e.printStackTrace();
       fail("Test failed with seed [" + seed + "] on pass [" + i + "]");
     }
+    long t2 = System.nanoTime();
+    System.out.println("Total time is " + ((t2 - t1)/1000000) + "ms");
+  }
+
+  @Test
+  public void testRandomAccessWritesLargeCache() throws IOException {
+    mapperCache.map = new ConcurrentLinkedHashMap.Builder<String, byte[]>().maximumWeightedCapacity(10000).build();
+    testRandomAccessWrites();
   }
 
   private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
     int reads = random.nextInt(MAX_NUMBER_OF_READS);
-    int buffer = random.nextInt(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE) + MIN_BUFFER_SIZE;
-    IndexInput fsInput = fsDir.openInput(name, buffer);
-    IndexInput hdfsInput = hdfs.openInput(name, buffer);
+    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
+    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
     assertEquals(fsInput.length(), hdfsInput.length());
     int fileLength = (int) fsInput.length();
     for (int i = 0; i < reads; i++) {
@@ -165,10 +193,8 @@
   private void createFile(String name, Directory fsDir, Directory hdfs) throws IOException {
     int writes = random.nextInt(MAX_NUMBER_OF_WRITES);
     int fileLength = random.nextInt(MAX_FILE_SIZE - MIN_FILE_SIZE) + MIN_FILE_SIZE;
-    IndexOutput fsOutput = fsDir.createOutput(name);
-    fsOutput.setLength(fileLength);
-    IndexOutput hdfsOutput = hdfs.createOutput(name);
-    hdfsOutput.setLength(fileLength);
+    IndexOutput fsOutput = fsDir.createOutput(name, IOContext.DEFAULT);
+    IndexOutput hdfsOutput = hdfs.createOutput(name, IOContext.DEFAULT);
     for (int i = 0; i < writes; i++) {
       byte[] buf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE];
       random.nextBytes(buf);

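The key change in this test is the new Cache.update() signature: it now takes a blockOffset plus a (buffer, offset, length) slice, so a partial write within a block has to be merged into whatever bytes are already cached. Below is a minimal standalone sketch of that copy-and-grow merge; the class name and string key are illustrative, not part of the commit (the real MapperCache keys blocks by name + blockId).

import java.util.HashMap;
import java.util.Map;

public class PartialBlockMergeSketch {
  private static final Map<String, byte[]> map = new HashMap<String, byte[]>();

  // Mirrors the MapperCache.update() logic above: merge buffer[offset, offset+length)
  // into the cached block at blockOffset, growing the cached array if needed.
  static void update(String key, int blockOffset, byte[] buffer, int offset, int length) {
    byte[] cached = map.get(key);
    if (cached != null) {
      byte[] b = new byte[Math.max(cached.length, blockOffset + length)];
      System.arraycopy(cached, 0, b, 0, cached.length);
      System.arraycopy(buffer, offset, b, blockOffset, length);
      cached = b;
    } else {
      cached = new byte[blockOffset + length];
      System.arraycopy(buffer, offset, cached, blockOffset, length);
    }
    map.put(key, cached);
  }

  public static void main(String[] args) {
    update("file/0", 0, new byte[] { 1, 2, 3, 4 }, 0, 4); // fill bytes 0-3
    update("file/0", 2, new byte[] { 9, 9 }, 0, 2);       // overwrite bytes 2-3
    System.out.println(java.util.Arrays.toString(map.get("file/0"))); // [1, 2, 9, 9]
  }
}
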
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
b/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
deleted file mode 100644
index f9072b7..0000000
--- a/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-package org.apache.blur.store.compressed;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import static org.apache.blur.lucene.LuceneConstant.LUCENE_VERSION;
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-
-import org.apache.blur.store.compressed.CompressedFieldDataDirectory;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.lucene.analysis.KeywordAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Field.Index;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.TieredMergePolicy;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.RAMDirectory;
-import org.junit.Test;
-
-
-public class CompressedFieldDataDirectoryTest {
-
-  private static final CompressionCodec COMPRESSION_CODEC = CompressedFieldDataDirectory.DEFAULT_COMPRESSION;
-
-  @Test
-  public void testCompressedFieldDataDirectoryBasic() throws CorruptIndexException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-    CompressedFieldDataDirectory directory = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC);
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(directory, config);
-    addDocs(writer, 0, 10);
-    writer.close();
-    testFetches(directory);
-  }
-
-  @Test
-  public void testCompressedFieldDataDirectoryTransition() throws CorruptIndexException, LockObtainFailedException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(dir, config);
-
-    addDocs(writer, 0, 5);
-    writer.close();
-
-    CompressedFieldDataDirectory directory = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory, config);
-    addDocs(writer, 5, 5);
-    writer.close();
-    testFetches(directory);
-  }
-
-  @Test
-  public void testCompressedFieldDataDirectoryMixedBlockSize() throws CorruptIndexException, LockObtainFailedException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(dir, config);
-    addDocs(writer, 0, 5);
-    writer.close();
-
-    CompressedFieldDataDirectory directory1 = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC, 2);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory1, config);
-    addDocs(writer, 5, 2);
-    writer.close();
-
-    CompressedFieldDataDirectory directory2 = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC, 4);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory2, config);
-    addDocs(writer, 7, 3);
-    writer.close();
-    testFetches(directory2);
-    testFileLengths(directory2);
-  }
-
-  private void testFileLengths(Directory dir) throws IOException {
-    String[] listAll = dir.listAll();
-    for (String name : listAll) {
-      IndexInput input = dir.openInput(name);
-      assertEquals(input.length(), dir.fileLength(name));
-      input.close();
-    }
-
-  }
-
-  private void testFetches(Directory directory) throws CorruptIndexException, IOException {
-    IndexReader reader = IndexReader.open(directory);
-    for (int i = 0; i < reader.maxDoc(); i++) {
-      String id = Integer.toString(i);
-      Document document = reader.document(i);
-      assertEquals(id, document.get("id"));
-    }
-  }
-
-  private void addDocs(IndexWriter writer, int starting, int amount) throws CorruptIndexException, IOException {
-    for (int i = 0; i < amount; i++) {
-      int index = starting + i;
-      writer.addDocument(getDoc(index));
-    }
-  }
-
-  private Document getDoc(int index) {
-    Document document = new Document();
-    document.add(new Field("id", Integer.toString(index), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
-    return document;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-testsuite/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-testsuite/pom.xml b/src/blur-testsuite/pom.xml
index f899c8d..408e5d0 100644
--- a/src/blur-testsuite/pom.xml
+++ b/src/blur-testsuite/pom.xml
@@ -19,12 +19,13 @@ under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<groupId>org.apache.blur</groupId>
 		<artifactId>blur</artifactId>
-		<version>0.1.3</version>
+		<version>0.1.5</version>
+        <relativePath>../pom.xml</relativePath>
 	</parent>
-	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.blur</groupId>
 	<artifactId>blur-testsuite</artifactId>
 	<packaging>jar</packaging>
@@ -34,34 +35,34 @@ under the License.
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-core</artifactId>
-			<version>0.1.3</version>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-mapred</artifactId>
-			<version>0.1.3</version>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-thrift</artifactId>
-			<version>0.1.3</version>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>junit</groupId>
 			<artifactId>junit</artifactId>
-			<version>4.7</version>
+			<version>${junit.version}</version>
 			<scope>test</scope>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.zookeeper</groupId>
 			<artifactId>zookeeper</artifactId>
-			<version>3.3.4</version>
+			<version>${zookeeper.version}</version>
 			<scope>provided</scope>
 		</dependency>
 		<dependency>
 			<groupId>log4j</groupId>
 			<artifactId>log4j</artifactId>
-			<version>1.2.15</version>
+			<version>${log4j.version}</version>
 			<scope>provided</scope>
 			<exclusions>
 				<exclusion>
@@ -89,10 +90,6 @@ under the License.
 			<id>libdir</id>
 			<url>file://${basedir}/../lib</url>
 		</repository>
-		<repository>
-			<id>cloudera</id>
-			<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-		</repository>
 	</repositories>
 
 	<build>
@@ -106,30 +103,6 @@ under the License.
 						<target>1.6</target>
 					</configuration>
 				</plugin>
-
-				<plugin>
-					<groupId>org.apache.maven.plugins</groupId>
-					<artifactId>maven-dependency-plugin</artifactId>
-					<executions>
-						<execution>
-							<id>copy-dependencies</id>
-							<phase>package</phase>
-							<goals>
-								<goal>copy-dependencies</goal>
-							</goals>
-							<configuration>
-								<outputDirectory>${project.build.directory}/../../../lib
-								</outputDirectory>
-								<overWriteReleases>false</overWriteReleases>
-								<overWriteSnapshots>false</overWriteSnapshots>
-								<overWriteIfNewer>true</overWriteIfNewer>
-								<excludeTransitive>true</excludeTransitive>
-								<excludeArtifactIds>junit,commons-cli,commons-logging,hadoop-core,slf4j-api,slf4j-log4j12
-								</excludeArtifactIds>
-							</configuration>
-						</execution>
-					</executions>
-				</plugin>
 			</plugins>
 		</pluginManagement>
 	</build>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-thrift/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-thrift/pom.xml b/src/blur-thrift/pom.xml
index aa52fe8..37bcb3b 100644
--- a/src/blur-thrift/pom.xml
+++ b/src/blur-thrift/pom.xml
@@ -19,12 +19,13 @@ under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<groupId>org.apache.blur</groupId>
 		<artifactId>blur</artifactId>
-		<version>0.1.3</version>
+		<version>0.1.5</version>
+        <relativePath>../pom.xml</relativePath>
 	</parent>
-	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.blur</groupId>
 	<artifactId>blur-thrift</artifactId>
 	<packaging>jar</packaging>
@@ -34,17 +35,17 @@ under the License.
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-util</artifactId>
-			<version>0.1.3</version>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.thrift</groupId>
 			<artifactId>libthrift</artifactId>
-			<version>0.7.0</version>
+			<version>${thrift.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>log4j</groupId>
 			<artifactId>log4j</artifactId>
-			<version>1.2.15</version>
+			<version>${log4j.version}</version>
 			<scope>provided</scope>
 			<exclusions>
 				<exclusion>
@@ -95,29 +96,6 @@ under the License.
 					</execution>
 				</executions>
 			</plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-dependency-plugin</artifactId>
-				<executions>
-					<execution>
-						<id>copy-dependencies</id>
-						<phase>package</phase>
-						<goals>
-							<goal>copy-dependencies</goal>
-						</goals>
-						<configuration>
-							<outputDirectory>${project.build.directory}/../../../lib
-							</outputDirectory>
-							<overWriteReleases>false</overWriteReleases>
-							<overWriteSnapshots>false</overWriteSnapshots>
-							<overWriteIfNewer>true</overWriteIfNewer>
-							<excludeTransitive>true</excludeTransitive>
-							<excludeArtifactIds>junit,commons-cli,commons-logging,hadoop,slf4j-api
-							</excludeArtifactIds>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
 		</plugins>
 	</build>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AlternateColumnDefinition.java
----------------------------------------------------------------------
diff --git a/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AlternateColumnDefinition.java
b/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AlternateColumnDefinition.java
index 4818510..ae347fb 100644
--- a/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AlternateColumnDefinition.java
+++ b/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AlternateColumnDefinition.java
@@ -1,7 +1,8 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 package org.apache.blur.thrift.generated;
 
@@ -24,6 +25,15 @@ package org.apache.blur.thrift.generated;
 
 
 
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Map;
@@ -47,6 +57,12 @@ public class AlternateColumnDefinition implements org.apache.thrift.TBase<Altern
 
   private static final org.apache.thrift.protocol.TField ANALYZER_CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("analyzerClassName", org.apache.thrift.protocol.TType.STRING, (short)1);
 
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new AlternateColumnDefinitionStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new AlternateColumnDefinitionTupleSchemeFactory());
+  }
+
   /**
    * 
    */
@@ -114,7 +130,6 @@ public class AlternateColumnDefinition implements org.apache.thrift.TBase<Altern
   }
 
   // isset id assignments
-
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -273,44 +288,11 @@ public class AlternateColumnDefinition implements org.apache.thrift.TBase<Altern
   }
 
   public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    org.apache.thrift.protocol.TField field;
-    iprot.readStructBegin();
-    while (true)
-    {
-      field = iprot.readFieldBegin();
-      if (field.type == org.apache.thrift.protocol.TType.STOP) { 
-        break;
-      }
-      switch (field.id) {
-        case 1: // ANALYZER_CLASS_NAME
-          if (field.type == org.apache.thrift.protocol.TType.STRING) {
-            this.analyzerClassName = iprot.readString();
-          } else { 
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        default:
-          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-      }
-      iprot.readFieldEnd();
-    }
-    iprot.readStructEnd();
-
-    // check for required fields of primitive type, which can't be checked in the validate method
-    validate();
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
   public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    validate();
-
-    oprot.writeStructBegin(STRUCT_DESC);
-    if (this.analyzerClassName != null) {
-      oprot.writeFieldBegin(ANALYZER_CLASS_NAME_FIELD_DESC);
-      oprot.writeString(this.analyzerClassName);
-      oprot.writeFieldEnd();
-    }
-    oprot.writeFieldStop();
-    oprot.writeStructEnd();
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
 
   @Override
@@ -331,6 +313,7 @@ public class AlternateColumnDefinition implements org.apache.thrift.TBase<Altern
 
   public void validate() throws org.apache.thrift.TException {
     // check for required fields
+    // check for sub-struct validity
   }
 
   private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -349,5 +332,89 @@ public class AlternateColumnDefinition implements org.apache.thrift.TBase<Altern
     }
   }
 
+  private static class AlternateColumnDefinitionStandardSchemeFactory implements SchemeFactory {
+    public AlternateColumnDefinitionStandardScheme getScheme() {
+      return new AlternateColumnDefinitionStandardScheme();
+    }
+  }
+
+  private static class AlternateColumnDefinitionStandardScheme extends StandardScheme<AlternateColumnDefinition> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, AlternateColumnDefinition struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // ANALYZER_CLASS_NAME
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.analyzerClassName = iprot.readString();
+              struct.setAnalyzerClassNameIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+
+      // check for required fields of primitive type, which can't be checked in the validate method
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, AlternateColumnDefinition struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.analyzerClassName != null) {
+        oprot.writeFieldBegin(ANALYZER_CLASS_NAME_FIELD_DESC);
+        oprot.writeString(struct.analyzerClassName);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class AlternateColumnDefinitionTupleSchemeFactory implements SchemeFactory {
+    public AlternateColumnDefinitionTupleScheme getScheme() {
+      return new AlternateColumnDefinitionTupleScheme();
+    }
+  }
+
+  private static class AlternateColumnDefinitionTupleScheme extends TupleScheme<AlternateColumnDefinition> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, AlternateColumnDefinition struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetAnalyzerClassName()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.isSetAnalyzerClassName()) {
+        oprot.writeString(struct.analyzerClassName);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, AlternateColumnDefinition struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        struct.analyzerClassName = iprot.readString();
+        struct.setAnalyzerClassNameIsSet(true);
+      }
+    }
+  }
+
 }
 

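With the Thrift 0.9.0 regeneration, the struct no longer hand-rolls its read()/write() bodies; it looks up a serialization scheme keyed by the protocol's getScheme() class, dispatching to the StandardScheme (classic field-by-field encoding) or the new TupleScheme (a compact encoding guarded by a presence bitset). A minimal round-trip sketch, assuming Thrift 0.9.0's TMemoryBuffer transport and the generated setter; the analyzer class name is only an illustrative string:

import org.apache.blur.thrift.generated.AlternateColumnDefinition;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.transport.TMemoryBuffer;

public class TupleSchemeRoundTrip {
  public static void main(String[] args) throws Exception {
    AlternateColumnDefinition def = new AlternateColumnDefinition();
    def.setAnalyzerClassName("org.apache.lucene.analysis.core.KeywordAnalyzer"); // illustrative value

    // TTupleProtocol reports TupleScheme from getScheme(), so write()/read()
    // dispatch to the AlternateColumnDefinitionTupleScheme generated above.
    TMemoryBuffer buffer = new TMemoryBuffer(256);
    def.write(new TTupleProtocol(buffer));

    AlternateColumnDefinition copy = new AlternateColumnDefinition();
    copy.read(new TTupleProtocol(buffer));
    System.out.println(def.equals(copy)); // true
  }
}
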
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/a4601422/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AnalyzerDefinition.java
----------------------------------------------------------------------
diff --git a/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AnalyzerDefinition.java
b/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AnalyzerDefinition.java
index 4d453f8..9a83d30 100644
--- a/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AnalyzerDefinition.java
+++ b/src/blur-thrift/src/main/java/org/apache/blur/thrift/generated/AnalyzerDefinition.java
@@ -1,7 +1,8 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 package org.apache.blur.thrift.generated;
 
@@ -24,6 +25,15 @@ package org.apache.blur.thrift.generated;
 
 
 
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Map;
@@ -49,6 +59,12 @@ public class AnalyzerDefinition implements org.apache.thrift.TBase<AnalyzerDefin
   private static final org.apache.thrift.protocol.TField FULL_TEXT_ANALYZER_CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("fullTextAnalyzerClassName", org.apache.thrift.protocol.TType.STRING, (short)2);
   private static final org.apache.thrift.protocol.TField COLUMN_FAMILY_DEFINITIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("columnFamilyDefinitions", org.apache.thrift.protocol.TType.MAP, (short)3);
 
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new AnalyzerDefinitionStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new AnalyzerDefinitionTupleSchemeFactory());
+  }
+
   /**
    * 
    */
@@ -136,7 +152,6 @@ public class AnalyzerDefinition implements org.apache.thrift.TBase<AnalyzerDefin
   }
 
   // isset id assignments
-
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -463,90 +478,11 @@ public class AnalyzerDefinition implements org.apache.thrift.TBase<AnalyzerDefin
   }
 
   public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    org.apache.thrift.protocol.TField field;
-    iprot.readStructBegin();
-    while (true)
-    {
-      field = iprot.readFieldBegin();
-      if (field.type == org.apache.thrift.protocol.TType.STOP) { 
-        break;
-      }
-      switch (field.id) {
-        case 1: // DEFAULT_DEFINITION
-          if (field.type == org.apache.thrift.protocol.TType.STRUCT) {
-            this.defaultDefinition = new ColumnDefinition();
-            this.defaultDefinition.read(iprot);
-          } else { 
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        case 2: // FULL_TEXT_ANALYZER_CLASS_NAME
-          if (field.type == org.apache.thrift.protocol.TType.STRING) {
-            this.fullTextAnalyzerClassName = iprot.readString();
-          } else { 
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        case 3: // COLUMN_FAMILY_DEFINITIONS
-          if (field.type == org.apache.thrift.protocol.TType.MAP) {
-            {
-              org.apache.thrift.protocol.TMap _map70 = iprot.readMapBegin();
-              this.columnFamilyDefinitions = new HashMap<String,ColumnFamilyDefinition>(2*_map70.size);
-              for (int _i71 = 0; _i71 < _map70.size; ++_i71)
-              {
-                String _key72; // required
-                ColumnFamilyDefinition _val73; // required
-                _key72 = iprot.readString();
-                _val73 = new ColumnFamilyDefinition();
-                _val73.read(iprot);
-                this.columnFamilyDefinitions.put(_key72, _val73);
-              }
-              iprot.readMapEnd();
-            }
-          } else { 
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        default:
-          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-      }
-      iprot.readFieldEnd();
-    }
-    iprot.readStructEnd();
-
-    // check for required fields of primitive type, which can't be checked in the validate method
-    validate();
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
   public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    validate();
-
-    oprot.writeStructBegin(STRUCT_DESC);
-    if (this.defaultDefinition != null) {
-      oprot.writeFieldBegin(DEFAULT_DEFINITION_FIELD_DESC);
-      this.defaultDefinition.write(oprot);
-      oprot.writeFieldEnd();
-    }
-    if (this.fullTextAnalyzerClassName != null) {
-      oprot.writeFieldBegin(FULL_TEXT_ANALYZER_CLASS_NAME_FIELD_DESC);
-      oprot.writeString(this.fullTextAnalyzerClassName);
-      oprot.writeFieldEnd();
-    }
-    if (this.columnFamilyDefinitions != null) {
-      oprot.writeFieldBegin(COLUMN_FAMILY_DEFINITIONS_FIELD_DESC);
-      {
-        oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, this.columnFamilyDefinitions.size()));
-        for (Map.Entry<String, ColumnFamilyDefinition> _iter74 : this.columnFamilyDefinitions.entrySet())
-        {
-          oprot.writeString(_iter74.getKey());
-          _iter74.getValue().write(oprot);
-        }
-        oprot.writeMapEnd();
-      }
-      oprot.writeFieldEnd();
-    }
-    oprot.writeFieldStop();
-    oprot.writeStructEnd();
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
 
   @Override
@@ -583,6 +519,10 @@ public class AnalyzerDefinition implements org.apache.thrift.TBase<AnalyzerDefin
 
   public void validate() throws org.apache.thrift.TException {
     // check for required fields
+    // check for sub-struct validity
+    if (defaultDefinition != null) {
+      defaultDefinition.validate();
+    }
   }
 
   private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -601,5 +541,177 @@ public class AnalyzerDefinition implements org.apache.thrift.TBase<AnalyzerDefin
     }
   }
 
+  private static class AnalyzerDefinitionStandardSchemeFactory implements SchemeFactory {
+    public AnalyzerDefinitionStandardScheme getScheme() {
+      return new AnalyzerDefinitionStandardScheme();
+    }
+  }
+
+  private static class AnalyzerDefinitionStandardScheme extends StandardScheme<AnalyzerDefinition> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, AnalyzerDefinition struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // DEFAULT_DEFINITION
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+              struct.defaultDefinition = new ColumnDefinition();
+              struct.defaultDefinition.read(iprot);
+              struct.setDefaultDefinitionIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // FULL_TEXT_ANALYZER_CLASS_NAME
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.fullTextAnalyzerClassName = iprot.readString();
+              struct.setFullTextAnalyzerClassNameIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // COLUMN_FAMILY_DEFINITIONS
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map140 = iprot.readMapBegin();
+                struct.columnFamilyDefinitions = new HashMap<String,ColumnFamilyDefinition>(2*_map140.size);
+                for (int _i141 = 0; _i141 < _map140.size; ++_i141)
+                {
+                  String _key142; // required
+                  ColumnFamilyDefinition _val143; // optional
+                  _key142 = iprot.readString();
+                  _val143 = new ColumnFamilyDefinition();
+                  _val143.read(iprot);
+                  struct.columnFamilyDefinitions.put(_key142, _val143);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setColumnFamilyDefinitionsIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+
+      // check for required fields of primitive type, which can't be checked in the validate method
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, AnalyzerDefinition struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.defaultDefinition != null) {
+        oprot.writeFieldBegin(DEFAULT_DEFINITION_FIELD_DESC);
+        struct.defaultDefinition.write(oprot);
+        oprot.writeFieldEnd();
+      }
+      if (struct.fullTextAnalyzerClassName != null) {
+        oprot.writeFieldBegin(FULL_TEXT_ANALYZER_CLASS_NAME_FIELD_DESC);
+        oprot.writeString(struct.fullTextAnalyzerClassName);
+        oprot.writeFieldEnd();
+      }
+      if (struct.columnFamilyDefinitions != null) {
+        oprot.writeFieldBegin(COLUMN_FAMILY_DEFINITIONS_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.columnFamilyDefinitions.size()));
+          for (Map.Entry<String, ColumnFamilyDefinition> _iter144 : struct.columnFamilyDefinitions.entrySet())
+          {
+            oprot.writeString(_iter144.getKey());
+            _iter144.getValue().write(oprot);
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class AnalyzerDefinitionTupleSchemeFactory implements SchemeFactory {
+    public AnalyzerDefinitionTupleScheme getScheme() {
+      return new AnalyzerDefinitionTupleScheme();
+    }
+  }
+
+  private static class AnalyzerDefinitionTupleScheme extends TupleScheme<AnalyzerDefinition> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, AnalyzerDefinition struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetDefaultDefinition()) {
+        optionals.set(0);
+      }
+      if (struct.isSetFullTextAnalyzerClassName()) {
+        optionals.set(1);
+      }
+      if (struct.isSetColumnFamilyDefinitions()) {
+        optionals.set(2);
+      }
+      oprot.writeBitSet(optionals, 3);
+      if (struct.isSetDefaultDefinition()) {
+        struct.defaultDefinition.write(oprot);
+      }
+      if (struct.isSetFullTextAnalyzerClassName()) {
+        oprot.writeString(struct.fullTextAnalyzerClassName);
+      }
+      if (struct.isSetColumnFamilyDefinitions()) {
+        {
+          oprot.writeI32(struct.columnFamilyDefinitions.size());
+          for (Map.Entry<String, ColumnFamilyDefinition> _iter145 : struct.columnFamilyDefinitions.entrySet())
+          {
+            oprot.writeString(_iter145.getKey());
+            _iter145.getValue().write(oprot);
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, AnalyzerDefinition struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(3);
+      if (incoming.get(0)) {
+        struct.defaultDefinition = new ColumnDefinition();
+        struct.defaultDefinition.read(iprot);
+        struct.setDefaultDefinitionIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.fullTextAnalyzerClassName = iprot.readString();
+        struct.setFullTextAnalyzerClassNameIsSet(true);
+      }
+      if (incoming.get(2)) {
+        {
+          org.apache.thrift.protocol.TMap _map146 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.columnFamilyDefinitions = new HashMap<String,ColumnFamilyDefinition>(2*_map146.size);
+          for (int _i147 = 0; _i147 < _map146.size; ++_i147)
+          {
+            String _key148; // required
+            ColumnFamilyDefinition _val149; // optional
+            _key148 = iprot.readString();
+            _val149 = new ColumnFamilyDefinition();
+            _val149.read(iprot);
+            struct.columnFamilyDefinitions.put(_key148, _val149);
+          }
+        }
+        struct.setColumnFamilyDefinitionsIsSet(true);
+      }
+    }
+  }
+
 }
 

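For a struct with several optional fields, the TupleScheme writes a presence BitSet first (three bits here, one per field) and then only the payloads of fields that are actually set; the reader consumes the same bitset to know which payloads follow. A sketch under the same assumptions as the previous example (the analyzer class name is illustrative):

import org.apache.blur.thrift.generated.AnalyzerDefinition;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.transport.TMemoryBuffer;

public class OptionalsBitSetSketch {
  public static void main(String[] args) throws Exception {
    // Only one of the three optional fields is set, so the tuple encoding
    // carries a 3-bit presence header plus a single string payload.
    AnalyzerDefinition def = new AnalyzerDefinition();
    def.setFullTextAnalyzerClassName("org.apache.lucene.analysis.standard.StandardAnalyzer");

    TMemoryBuffer buffer = new TMemoryBuffer(256);
    def.write(new TTupleProtocol(buffer));

    AnalyzerDefinition copy = new AnalyzerDefinition();
    copy.read(new TTupleProtocol(buffer));
    System.out.println(copy.isSetDefaultDefinition());         // false
    System.out.println(copy.isSetFullTextAnalyzerClassName()); // true
  }
}
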
