Subject: svn commit: r677059 [5/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ ...
Date: Tue, 15 Jul 2008 21:44:10 -0000
From: aroush@apache.org
To: lucene-net-commits@incubator.apache.org
Reply-To: lucene-net-dev@incubator.apache.org
Message-Id: <20080715214424.9F7992388A6A@eris.apache.org>

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldsReader.cs?rev=677059&r1=677058&r2=677059&view=diff ============================================================================== --- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs (original) +++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs Tue Jul 15 14:44:04 2008 @@ -19,73 +19,76 @@ using NUnit.Framework; -using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; using Lucene.Net.Documents; -using Similarity = Lucene.Net.Search.Similarity; +using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException; using FSDirectory = Lucene.Net.Store.FSDirectory; using RAMDirectory = Lucene.Net.Store.RAMDirectory; +using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; +using Similarity = Lucene.Net.Search.Similarity; +using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using _TestUtil = Lucene.Net.Util._TestUtil; namespace Lucene.Net.Index { [TestFixture] - public class TestFieldsReader + public class TestFieldsReader : LuceneTestCase { - private class AnonymousClassFieldSelector : FieldSelector - { - public AnonymousClassFieldSelector(TestFieldsReader enclosingInstance) - { - InitBlock(enclosingInstance); - } - private void InitBlock(TestFieldsReader enclosingInstance) - { - this.enclosingInstance = enclosingInstance; - } - private TestFieldsReader enclosingInstance; - public TestFieldsReader Enclosing_Instance - { - get - { - return enclosingInstance; - } + 
[Serializable] + private class AnonymousClassFieldSelector : FieldSelector + { + public AnonymousClassFieldSelector(TestFieldsReader enclosingInstance) + { + InitBlock(enclosingInstance); + } + private void InitBlock(TestFieldsReader enclosingInstance) + { + this.enclosingInstance = enclosingInstance; + } + private TestFieldsReader enclosingInstance; + public TestFieldsReader Enclosing_Instance + { + get + { + return enclosingInstance; + } - } - public virtual FieldSelectorResult Accept(System.String fieldName) - { - if (fieldName.Equals(DocHelper.TEXT_FIELD_1_KEY) || fieldName.Equals(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY) || fieldName.Equals(DocHelper.LAZY_FIELD_BINARY_KEY)) - return FieldSelectorResult.SIZE; - else if (fieldName.Equals(DocHelper.TEXT_FIELD_3_KEY)) - return FieldSelectorResult.LOAD; - else - return FieldSelectorResult.NO_LOAD; - } - } - private RAMDirectory dir = new RAMDirectory(); + } + public virtual FieldSelectorResult Accept(System.String fieldName) + { + if (fieldName.Equals(DocHelper.TEXT_FIELD_1_KEY) || fieldName.Equals(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY) || fieldName.Equals(DocHelper.LAZY_FIELD_BINARY_KEY)) + return FieldSelectorResult.SIZE; + else if (fieldName.Equals(DocHelper.TEXT_FIELD_3_KEY)) + return FieldSelectorResult.LOAD; + else + return FieldSelectorResult.NO_LOAD; + } + } + private RAMDirectory dir = new RAMDirectory(); private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document(); private FieldInfos fieldInfos = null; + private System.String segmentName = null; - // public TestFieldsReader(System.String s) - // { - // } - - [SetUp] - public virtual void SetUp() + [SetUp] + public override void SetUp() { + base.SetUp(); fieldInfos = new FieldInfos(); DocHelper.SetupDoc(testDoc); fieldInfos.Add(testDoc); - DocumentWriter writer = new DocumentWriter(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50); - Assert.IsTrue(writer != null); - writer.AddDocument("test", testDoc); + IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true); + writer.SetUseCompoundFile(false); + writer.AddDocument(testDoc); + writer.Close(); + segmentName = writer.NewestSegment().name; } [Test] - public virtual void Test() + public virtual void Test() { Assert.IsTrue(dir != null); Assert.IsTrue(fieldInfos != null); - FieldsReader reader = new FieldsReader(dir, "test", fieldInfos); + FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos); Assert.IsTrue(reader != null); Assert.IsTrue(reader.Size() == 1); Lucene.Net.Documents.Document doc = reader.Doc(0, null); @@ -112,182 +115,221 @@ } [Test] - public virtual void TestLazyFields() - { - Assert.IsTrue(dir != null); - Assert.IsTrue(fieldInfos != null); - FieldsReader reader = new FieldsReader(dir, "test", fieldInfos); - Assert.IsTrue(reader != null); - Assert.IsTrue(reader.Size() == 1); - System.Collections.Hashtable loadFieldNames = new System.Collections.Hashtable(); - loadFieldNames.Add(DocHelper.TEXT_FIELD_1_KEY, DocHelper.TEXT_FIELD_1_KEY); - loadFieldNames.Add(DocHelper.TEXT_FIELD_UTF1_KEY, DocHelper.TEXT_FIELD_UTF1_KEY); - System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable(); - //new String[]{DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_BINARY_KEY}; - lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY); - lazyFieldNames.Add(DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY); - lazyFieldNames.Add(DocHelper.LAZY_FIELD_BINARY_KEY, DocHelper.LAZY_FIELD_BINARY_KEY); - 
lazyFieldNames.Add(DocHelper.TEXT_FIELD_UTF2_KEY, DocHelper.TEXT_FIELD_UTF2_KEY); - lazyFieldNames.Add(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY, DocHelper.COMPRESSED_TEXT_FIELD_2_KEY); - SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames); - Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector); - Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); - Fieldable field = doc.GetFieldable(DocHelper.LAZY_FIELD_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be"); - System.String value_Renamed = field.StringValue(); - Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); - Assert.IsTrue(value_Renamed.Equals(DocHelper.LAZY_FIELD_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.LAZY_FIELD_TEXT); - field = doc.GetFieldable(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be"); - value_Renamed = field.StringValue(); - Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); - Assert.IsTrue(value_Renamed.Equals(DocHelper.FIELD_2_COMPRESSED_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.FIELD_2_COMPRESSED_TEXT); - field = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be"); - field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF1_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be"); - Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF1_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF1_TEXT); - - field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF2_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - Assert.IsTrue(field.IsLazy() == true, "Field is lazy and it should not be"); - Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF2_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF2_TEXT); - - field = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - byte[] bytes = field.BinaryValue(); - Assert.IsTrue(bytes != null, "bytes is null and it shouldn't be"); - Assert.IsTrue(DocHelper.LAZY_FIELD_BINARY_BYTES.Length == bytes.Length, ""); - for (int i = 0; i < bytes.Length; i++) - { - Assert.IsTrue(bytes[i] == DocHelper.LAZY_FIELD_BINARY_BYTES[i], "byte[" + i + "] is mismatched"); - } - } + public virtual void TestLazyFields() + { + Assert.IsTrue(dir != null); + Assert.IsTrue(fieldInfos != null); + FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos); + Assert.IsTrue(reader != null); + Assert.IsTrue(reader.Size() == 1); + System.Collections.Hashtable loadFieldNames = new System.Collections.Hashtable(); + loadFieldNames.Add(DocHelper.TEXT_FIELD_1_KEY, DocHelper.TEXT_FIELD_1_KEY); + loadFieldNames.Add(DocHelper.TEXT_FIELD_UTF1_KEY, DocHelper.TEXT_FIELD_UTF1_KEY); + System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable(); + //new String[]{DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_BINARY_KEY}; + lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY); + 
lazyFieldNames.Add(DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY); + lazyFieldNames.Add(DocHelper.LAZY_FIELD_BINARY_KEY, DocHelper.LAZY_FIELD_BINARY_KEY); + lazyFieldNames.Add(DocHelper.TEXT_FIELD_UTF2_KEY, DocHelper.TEXT_FIELD_UTF2_KEY); + lazyFieldNames.Add(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY, DocHelper.COMPRESSED_TEXT_FIELD_2_KEY); + SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames); + Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector); + Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); + Fieldable field = doc.GetFieldable(DocHelper.LAZY_FIELD_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be"); + System.String value_Renamed = field.StringValue(); + Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); + Assert.IsTrue(value_Renamed.Equals(DocHelper.LAZY_FIELD_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.LAZY_FIELD_TEXT); + field = doc.GetFieldable(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be"); + value_Renamed = field.StringValue(); + Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); + Assert.IsTrue(value_Renamed.Equals(DocHelper.FIELD_2_COMPRESSED_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.FIELD_2_COMPRESSED_TEXT); + field = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be"); + field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF1_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be"); + Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF1_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF1_TEXT); + + field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF2_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy() == true, "Field is lazy and it should not be"); + Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF2_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF2_TEXT); + + field = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + byte[] bytes = field.BinaryValue(); + Assert.IsTrue(bytes != null, "bytes is null and it shouldn't be"); + Assert.IsTrue(DocHelper.LAZY_FIELD_BINARY_BYTES.Length == bytes.Length, ""); + for (int i = 0; i < bytes.Length; i++) + { + Assert.IsTrue(bytes[i] == DocHelper.LAZY_FIELD_BINARY_BYTES[i], "byte[" + i + "] is mismatched"); + } + } - [Test] - public virtual void TestLoadFirst() - { - Assert.IsTrue(dir != null); - Assert.IsTrue(fieldInfos != null); - FieldsReader reader = new FieldsReader(dir, "test", fieldInfos); - Assert.IsTrue(reader != null); - Assert.IsTrue(reader.Size() == 1); - LoadFirstFieldSelector fieldSelector = new LoadFirstFieldSelector(); - Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector); - Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); - int count = 0; - System.Collections.IList l = doc.GetFields(); - for (System.Collections.IEnumerator iter = l.GetEnumerator(); iter.MoveNext(); ) - { - Field field = 
(Field) iter.Current; - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - System.String sv = field.StringValue(); - Assert.IsTrue(sv != null, "sv is null and it shouldn't be"); - count++; - } - Assert.IsTrue(count == 1, count + " does not equal: " + 1); - } + [Test] + public virtual void TestLazyFieldsAfterClose() + { + Assert.IsTrue(dir != null); + Assert.IsTrue(fieldInfos != null); + FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos); + Assert.IsTrue(reader != null); + Assert.IsTrue(reader.Size() == 1); + System.Collections.Hashtable loadFieldNames = new System.Collections.Hashtable(); + loadFieldNames.Add(DocHelper.TEXT_FIELD_1_KEY, DocHelper.TEXT_FIELD_1_KEY); + loadFieldNames.Add(DocHelper.TEXT_FIELD_UTF1_KEY, DocHelper.TEXT_FIELD_UTF1_KEY); + System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable(); + lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY); + lazyFieldNames.Add(DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY); + lazyFieldNames.Add(DocHelper.LAZY_FIELD_BINARY_KEY, DocHelper.LAZY_FIELD_BINARY_KEY); + lazyFieldNames.Add(DocHelper.TEXT_FIELD_UTF2_KEY, DocHelper.TEXT_FIELD_UTF2_KEY); + lazyFieldNames.Add(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY, DocHelper.COMPRESSED_TEXT_FIELD_2_KEY); + SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames); + Document doc = reader.Doc(0, fieldSelector); + Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); + Fieldable field = doc.GetFieldable(DocHelper.LAZY_FIELD_KEY); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be"); + reader.Close(); + try + { + System.String value_Renamed = field.StringValue(); + Assert.Fail("did not hit AlreadyClosedException as expected"); + } + catch (AlreadyClosedException) + { + // expected + } + } - /// Not really a test per se, but we should have some way of assessing whether this is worthwhile. - ///

- /// Must test using a File based directory - /// - ///

- /// Exception - [Test] - public virtual void TestLazyPerformance() - { - System.String tmpIODir = SupportClass.AppSettings.Get("tempDir", ""); - System.String userName = System.Environment.UserName; - System.String path = tmpIODir + System.IO.Path.DirectorySeparatorChar.ToString() + "lazyDir" + userName; - System.IO.FileInfo file = new System.IO.FileInfo(path); - _TestUtil.RmDir(file); - FSDirectory tmpDir = FSDirectory.GetDirectory(file); - Assert.IsTrue(tmpDir != null); - DocumentWriter writer = new DocumentWriter(tmpDir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50); - Assert.IsTrue(writer != null); - writer.AddDocument("test", testDoc); - Assert.IsTrue(fieldInfos != null); - FieldsReader reader; - long lazyTime = 0; - long regularTime = 0; - int length = 50; - System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable(); - lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY); - SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(new System.Collections.Hashtable(), lazyFieldNames); - - for (int i = 0; i < length; i++) - { - reader = new FieldsReader(tmpDir, "test", fieldInfos); - Assert.IsTrue(reader != null); - Assert.IsTrue(reader.Size() == 1); + [Test] + public virtual void TestLoadFirst() + { + Assert.IsTrue(dir != null); + Assert.IsTrue(fieldInfos != null); + FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos); + Assert.IsTrue(reader != null); + Assert.IsTrue(reader.Size() == 1); + LoadFirstFieldSelector fieldSelector = new LoadFirstFieldSelector(); + Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector); + Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); + int count = 0; + System.Collections.IList l = doc.GetFields(); + for (System.Collections.IEnumerator iter = l.GetEnumerator(); iter.MoveNext(); ) + { + Field field = (Field) iter.Current; + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + System.String sv = field.StringValue(); + Assert.IsTrue(sv != null, "sv is null and it shouldn't be"); + count++; + } + Assert.IsTrue(count == 1, count + " does not equal: " + 1); + } + + /// Not really a test per se, but we should have some way of assessing whether this is worthwhile. + ///

+ /// Must test using a File based directory + /// + ///

+ /// Exception + [Test] + public virtual void TestLazyPerformance() + { + System.String tmpIODir = SupportClass.AppSettings.Get("tempDir", ""); + System.String userName = System.Environment.UserName; + System.String path = tmpIODir + System.IO.Path.DirectorySeparatorChar.ToString() + "lazyDir" + userName; + System.IO.FileInfo file = new System.IO.FileInfo(path); + _TestUtil.RmDir(file); + FSDirectory tmpDir = FSDirectory.GetDirectory(file); + Assert.IsTrue(tmpDir != null); + + IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true); + writer.SetUseCompoundFile(false); + writer.AddDocument(testDoc); + writer.Close(); + segmentName = writer.NewestSegment().name; + + Assert.IsTrue(fieldInfos != null); + FieldsReader reader; + long lazyTime = 0; + long regularTime = 0; + int length = 50; + System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable(); + lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY); + SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(new System.Collections.Hashtable(), lazyFieldNames); + + for (int i = 0; i < length; i++) + { + reader = new FieldsReader(tmpDir, segmentName, fieldInfos); + Assert.IsTrue(reader != null); + Assert.IsTrue(reader.Size() == 1); - Lucene.Net.Documents.Document doc; - doc = reader.Doc(0, null); //Load all of them - Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); - Fieldable field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY); - Assert.IsTrue(field.IsLazy() == false, "field is lazy"); - System.String value_Renamed; - long start; - long finish; - start = System.DateTime.Now.Millisecond; - //On my machine this was always 0ms. - value_Renamed = field.StringValue(); - finish = System.DateTime.Now.Millisecond; - Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); - Assert.IsTrue(field != null, "field is null and it shouldn't be"); - regularTime += (finish - start); - reader.Close(); - reader = null; - doc = null; - //Hmmm, are we still in cache??? - System.GC.Collect(); - reader = new FieldsReader(tmpDir, "test", fieldInfos); - doc = reader.Doc(0, fieldSelector); - field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY); - Assert.IsTrue(field.IsLazy() == true, "field is not lazy"); - start = System.DateTime.Now.Millisecond; - //On my machine this took around 50 - 70ms - value_Renamed = field.StringValue(); - finish = System.DateTime.Now.Millisecond; - Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); - lazyTime += (finish - start); - reader.Close(); - } - System.Console.Out.WriteLine("Average Non-lazy time (should be very close to zero): " + regularTime / length + " ms for " + length + " reads"); - System.Console.Out.WriteLine("Average Lazy Time (should be greater than zero): " + lazyTime / length + " ms for " + length + " reads"); - } + Lucene.Net.Documents.Document doc; + doc = reader.Doc(0, null); //Load all of them + Assert.IsTrue(doc != null, "doc is null and it shouldn't be"); + Fieldable field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY); + Assert.IsTrue(field.IsLazy() == false, "field is lazy"); + System.String value_Renamed; + long start; + long finish; + start = System.DateTime.Now.Millisecond; + //On my machine this was always 0ms. 
+ value_Renamed = field.StringValue(); + finish = System.DateTime.Now.Millisecond; + Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); + Assert.IsTrue(field != null, "field is null and it shouldn't be"); + regularTime += (finish - start); + reader.Close(); + reader = null; + doc = null; + //Hmmm, are we still in cache??? + System.GC.Collect(); + reader = new FieldsReader(tmpDir, segmentName, fieldInfos); + doc = reader.Doc(0, fieldSelector); + field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY); + Assert.IsTrue(field.IsLazy() == true, "field is not lazy"); + start = System.DateTime.Now.Millisecond; + //On my machine this took around 50 - 70ms + value_Renamed = field.StringValue(); + finish = System.DateTime.Now.Millisecond; + Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be"); + lazyTime += (finish - start); + reader.Close(); + } + System.Console.Out.WriteLine("Average Non-lazy time (should be very close to zero): " + regularTime / length + " ms for " + length + " reads"); + System.Console.Out.WriteLine("Average Lazy Time (should be greater than zero): " + lazyTime / length + " ms for " + length + " reads"); + } - [Test] - public virtual void TestLoadSize() - { - FieldsReader reader = new FieldsReader(dir, "test", fieldInfos); - Lucene.Net.Documents.Document doc; - - doc = reader.Doc(0, new AnonymousClassFieldSelector(this)); - Fieldable f1 = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY); - Fieldable f3 = doc.GetFieldable(DocHelper.TEXT_FIELD_3_KEY); - Fieldable fb = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY); - Assert.IsTrue(f1.IsBinary()); - Assert.IsTrue(!f3.IsBinary()); - Assert.IsTrue(fb.IsBinary()); - AssertSizeEquals(2 * DocHelper.FIELD_1_TEXT.Length, f1.BinaryValue()); - Assert.AreEqual(DocHelper.FIELD_3_TEXT, f3.StringValue()); - AssertSizeEquals(DocHelper.LAZY_FIELD_BINARY_BYTES.Length, fb.BinaryValue()); + [Test] + public virtual void TestLoadSize() + { + FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos); + Lucene.Net.Documents.Document doc; - reader.Close(); - } + doc = reader.Doc(0, new AnonymousClassFieldSelector(this)); + Fieldable f1 = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY); + Fieldable f3 = doc.GetFieldable(DocHelper.TEXT_FIELD_3_KEY); + Fieldable fb = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY); + Assert.IsTrue(f1.IsBinary()); + Assert.IsTrue(!f3.IsBinary()); + Assert.IsTrue(fb.IsBinary()); + AssertSizeEquals(2 * DocHelper.FIELD_1_TEXT.Length, f1.BinaryValue()); + Assert.AreEqual(DocHelper.FIELD_3_TEXT, f3.StringValue()); + AssertSizeEquals(DocHelper.LAZY_FIELD_BINARY_BYTES.Length, fb.BinaryValue()); + + reader.Close(); + } - private void AssertSizeEquals(int size, byte[] sizebytes) - { - Assert.AreEqual((byte) (size >> 24), sizebytes[0]); - Assert.AreEqual((byte) (size >> 16), sizebytes[1]); - Assert.AreEqual((byte) (size >> 8), sizebytes[2]); - Assert.AreEqual((byte) size, sizebytes[3]); - } - } + private void AssertSizeEquals(int size, byte[] sizebytes) + { + Assert.AreEqual((byte) (size >> 24), sizebytes[0]); + Assert.AreEqual((byte) (size >> 16), sizebytes[1]); + Assert.AreEqual((byte) (size >> 8), sizebytes[2]); + Assert.AreEqual((byte) size, sizebytes[3]); + } + } } \ No newline at end of file Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFilterIndexReader.cs?rev=677059&r1=677058&r2=677059&view=diff 
============================================================================== --- incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs (original) +++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs Tue Jul 15 14:44:04 2008 @@ -19,16 +19,19 @@ using NUnit.Framework; -using RAMDirectory = Lucene.Net.Store.RAMDirectory; -using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; +//using TestRunner = junit.textui.TestRunner; using Document = Lucene.Net.Documents.Document; using Field = Lucene.Net.Documents.Field; +using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory; +using RAMDirectory = Lucene.Net.Store.RAMDirectory; +using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; +using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; namespace Lucene.Net.Index { [TestFixture] - public class TestFilterIndexReader + public class TestFilterIndexReader : LuceneTestCase { private class TestReader : FilterIndexReader @@ -56,7 +59,7 @@ /// Filter that only returns odd numbered documents. private class TestTermPositions : FilterTermPositions { - public TestTermPositions(TermPositions in_Renamed) : base(in_Renamed) + public TestTermPositions(TermPositions in_Renamed) : base(in_Renamed) { } @@ -100,9 +103,9 @@ /// Tests the IndexReader.getFieldNames implementation /// Exception on error [Test] - public virtual void TestFilterIndexReader_Renamed_Method() + public virtual void TestFilterIndexReader_Renamed_Method() { - RAMDirectory directory = new RAMDirectory(); + RAMDirectory directory = new MockRAMDirectory(); IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true); Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document(); @@ -121,6 +124,8 @@ IndexReader reader = new TestReader(IndexReader.Open(directory)); + Assert.IsTrue(reader.IsOptimized()); + TermEnum terms = reader.Terms(); while (terms.Next()) { @@ -135,6 +140,7 @@ } reader.Close(); + directory.Close(); } } } \ No newline at end of file Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexFileDeleter.cs?rev=677059&r1=677058&r2=677059&view=diff ============================================================================== --- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs (original) +++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs Tue Jul 15 14:44:04 2008 @@ -19,16 +19,17 @@ using NUnit.Framework; -using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; -using IndexSearcher = Lucene.Net.Search.IndexSearcher; -using TermQuery = Lucene.Net.Search.TermQuery; -using Hits = Lucene.Net.Search.Hits; +using Document = Lucene.Net.Documents.Document; +using Field = Lucene.Net.Documents.Field; using Directory = Lucene.Net.Store.Directory; using IndexInput = Lucene.Net.Store.IndexInput; using IndexOutput = Lucene.Net.Store.IndexOutput; using RAMDirectory = Lucene.Net.Store.RAMDirectory; -using Document = Lucene.Net.Documents.Document; -using Field = Lucene.Net.Documents.Field; +using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer; +using Hits = Lucene.Net.Search.Hits; +using IndexSearcher = Lucene.Net.Search.IndexSearcher; +using TermQuery = Lucene.Net.Search.TermQuery; +using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; namespace Lucene.Net.Index { @@ -37,16 +38,17 @@ Verify we can read the pre-2.1 file format, do searches against it, and add documents to it.*/ - 
[TestFixture] - public class TestIndexFileDeleter + [TestFixture] + public class TestIndexFileDeleter : LuceneTestCase { - [Test] + [Test] public virtual void TestDeleteLeftoverFiles() { Directory dir = new RAMDirectory(); IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true); + writer.SetMaxBufferedDocs(10); int i; for (i = 0; i < 35; i++) { @@ -91,7 +93,7 @@ for (i = 0; i < fieldInfos.Size(); i++) { FieldInfo fi = fieldInfos.FieldInfo(i); - if (fi.Name.Equals("content")) + if (fi.Name_ForNUnitTest.Equals("content")) { contentFieldIndex = i; break; @@ -196,6 +198,8 @@ out_Renamed.WriteBytes(b, len); remainder -= len; } + in_Renamed.Close(); + out_Renamed.Close(); } private void AddDoc(IndexWriter writer, int id) @@ -206,28 +210,28 @@ writer.AddDocument(doc); } - public static bool ArrayEquals(System.Array array1, System.Array array2) - { - bool result = false; - if ((array1 == null) && (array2 == null)) - result = true; - else if ((array1 != null) && (array2 != null)) - { - if (array1.Length == array2.Length) - { - int length = array1.Length; - result = true; - for (int index = 0; index < length; index++) - { - if (!(array1.GetValue(index).Equals(array2.GetValue(index)))) - { - result = false; - break; - } - } - } - } - return result; - } - } + public static bool ArrayEquals(System.Array array1, System.Array array2) + { + bool result = false; + if ((array1 == null) && (array2 == null)) + result = true; + else if ((array1 != null) && (array2 != null)) + { + if (array1.Length == array2.Length) + { + int length = array1.Length; + result = true; + for (int index = 0; index < length; index++) + { + if (!(array1.GetValue(index).Equals(array2.GetValue(index)))) + { + result = false; + break; + } + } + } + } + return result; + } + } } \ No newline at end of file Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexInput.cs?rev=677059&r1=677058&r2=677059&view=diff ============================================================================== --- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs (original) +++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs Tue Jul 15 14:44:04 2008 @@ -1,4 +1,4 @@ -/* +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. 
@@ -20,14 +20,15 @@ using NUnit.Framework; using IndexInput = Lucene.Net.Store.IndexInput; +using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; namespace Lucene.Net.Index { [TestFixture] - public class TestIndexInput + public class TestIndexInput : LuceneTestCase { [Test] - public virtual void TestRead() + public virtual void TestRead() { IndexInput is_Renamed = new MockIndexInput(new byte[]{(byte) (0x80), (byte) (0x01), (byte) (0xFF), (byte) (0x7F), (byte) (0x80), (byte) (0x80), (byte) (0x01), (byte) (0x81), (byte) (0x80), (byte) (0x01), (byte) (0x06), (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'}); Assert.AreEqual(128, is_Renamed.ReadVInt()); @@ -37,40 +38,40 @@ Assert.AreEqual("Lucene", is_Renamed.ReadString()); } - /// Expert - /// - /// - /// IOException - [Test] - public virtual void TestSkipChars() - { - byte[] bytes = new byte[]{(byte) 0x80, (byte) 0x01, (byte) 0xFF, (byte) 0x7F, (byte) 0x80, (byte) 0x80, (byte) 0x01, (byte) 0x81, (byte) 0x80, (byte) 0x01, (byte) 0x06, (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'}; - System.String utf8Str = "\u0634\u1ea1"; - byte[] utf8Bytes = System.Text.Encoding.GetEncoding("UTF-8").GetBytes(utf8Str); - byte[] theBytes = new byte[bytes.Length + 1 + utf8Bytes.Length]; - Array.Copy(bytes, 0, theBytes, 0, bytes.Length); - theBytes[bytes.Length] = (byte) utf8Str.Length; //Add in the number of chars we are storing, which should fit in a byte for this test - Array.Copy(utf8Bytes, 0, theBytes, bytes.Length + 1, utf8Bytes.Length); - IndexInput is_Renamed = new MockIndexInput(theBytes); - Assert.AreEqual(128, is_Renamed.ReadVInt()); - Assert.AreEqual(16383, is_Renamed.ReadVInt()); - Assert.AreEqual(16384, is_Renamed.ReadVInt()); - Assert.AreEqual(16385, is_Renamed.ReadVInt()); - int charsToRead = is_Renamed.ReadVInt(); //number of chars in the Lucene string - Assert.IsTrue(0x06 == charsToRead, 0x06 + " does not equal: " + charsToRead); - is_Renamed.SkipChars(3); - char[] chars = new char[3]; //there should be 6 chars remaining - is_Renamed.ReadChars(chars, 0, 3); - System.String tmpStr = new System.String(chars); - Assert.IsTrue(tmpStr.Equals("ene") == true, tmpStr + " is not equal to " + "ene"); - //Now read the UTF8 stuff - charsToRead = is_Renamed.ReadVInt() - 1; //since we are skipping one - is_Renamed.SkipChars(1); - Assert.IsTrue(utf8Str.Length - 1 == charsToRead, utf8Str.Length - 1 + " does not equal: " + charsToRead); - chars = new char[charsToRead]; - is_Renamed.ReadChars(chars, 0, charsToRead); - tmpStr = new System.String(chars); - Assert.IsTrue(tmpStr.Equals(utf8Str.Substring(1)) == true, tmpStr + " is not equal to " + utf8Str.Substring(1)); - } - } + /// Expert + /// + /// + /// IOException + [Test] + public virtual void TestSkipChars() + { + byte[] bytes = new byte[]{(byte) 0x80, (byte) 0x01, (byte) 0xFF, (byte) 0x7F, (byte) 0x80, (byte) 0x80, (byte) 0x01, (byte) 0x81, (byte) 0x80, (byte) 0x01, (byte) 0x06, (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'}; + System.String utf8Str = "\u0634\u1ea1"; + byte[] utf8Bytes = System.Text.Encoding.GetEncoding("UTF-8").GetBytes(utf8Str); + byte[] theBytes = new byte[bytes.Length + 1 + utf8Bytes.Length]; + Array.Copy(bytes, 0, theBytes, 0, bytes.Length); + theBytes[bytes.Length] = (byte) utf8Str.Length; //Add in the number of chars we are storing, which should fit in a byte for this test + Array.Copy(utf8Bytes, 0, theBytes, bytes.Length + 1, utf8Bytes.Length); + IndexInput is_Renamed = new MockIndexInput(theBytes); + 
Assert.AreEqual(128, is_Renamed.ReadVInt()); + Assert.AreEqual(16383, is_Renamed.ReadVInt()); + Assert.AreEqual(16384, is_Renamed.ReadVInt()); + Assert.AreEqual(16385, is_Renamed.ReadVInt()); + int charsToRead = is_Renamed.ReadVInt(); //number of chars in the Lucene string + Assert.IsTrue(0x06 == charsToRead, 0x06 + " does not equal: " + charsToRead); + is_Renamed.SkipChars(3); + char[] chars = new char[3]; //there should be 6 chars remaining + is_Renamed.ReadChars(chars, 0, 3); + System.String tmpStr = new System.String(chars); + Assert.IsTrue(tmpStr.Equals("ene") == true, tmpStr + " is not equal to " + "ene"); + //Now read the UTF8 stuff + charsToRead = is_Renamed.ReadVInt() - 1; //since we are skipping one + is_Renamed.SkipChars(1); + Assert.IsTrue(utf8Str.Length - 1 == charsToRead, utf8Str.Length - 1 + " does not equal: " + charsToRead); + chars = new char[charsToRead]; + is_Renamed.ReadChars(chars, 0, charsToRead); + tmpStr = new System.String(chars); + Assert.IsTrue(tmpStr.Equals(utf8Str.Substring(1)) == true, tmpStr + " is not equal to " + utf8Str.Substring(1)); + } + } } \ No newline at end of file Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexModifier.cs?rev=677059&r1=677058&r2=677059&view=diff ============================================================================== --- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs (original) +++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs Tue Jul 15 14:44:04 2008 @@ -19,9 +19,6 @@ using NUnit.Framework; -using Analyzer = Lucene.Net.Analysis.Analyzer; -using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer; -using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer; using Document = Lucene.Net.Documents.Document; using Field = Lucene.Net.Documents.Field; using Index = Lucene.Net.Documents.Field.Index; @@ -29,6 +26,10 @@ using Directory = Lucene.Net.Store.Directory; using FSDirectory = Lucene.Net.Store.FSDirectory; using RAMDirectory = Lucene.Net.Store.RAMDirectory; +using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; +using Analyzer = Lucene.Net.Analysis.Analyzer; +using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer; +using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer; namespace Lucene.Net.Index { @@ -39,8 +40,10 @@ /// /// Daniel Naber /// + /// + /// [TestFixture] - public class TestIndexModifier + public class TestIndexModifier : LuceneTestCase { private int docCount = 0; @@ -48,7 +51,7 @@ private Term allDocTerm = new Term("all", "x"); [Test] - public virtual void TestIndex() + public virtual void TestIndex() { Directory ramDir = new RAMDirectory(); IndexModifier i = new IndexModifier(ramDir, new StandardAnalyzer(), true); @@ -76,7 +79,7 @@ // Lucene defaults: Assert.IsNull(i.GetInfoStream()); Assert.IsTrue(i.GetUseCompoundFile()); - Assert.AreEqual(10, i.GetMaxBufferedDocs()); + Assert.AreEqual(IndexWriter.DISABLE_AUTO_FLUSH, i.GetMaxBufferedDocs()); Assert.AreEqual(10000, i.GetMaxFieldLength()); Assert.AreEqual(10, i.GetMergeFactor()); // test setting properties: @@ -110,14 +113,14 @@ i.DocCount(); Assert.Fail(); } - catch (System.SystemException e) + catch (System.SystemException) { // expected exception } } [Test] - public virtual void TestExtendedIndex() + public virtual void TestExtendedIndex() { Directory ramDir = new RAMDirectory(); PowerIndex powerIndex = new PowerIndex(this, ramDir, new StandardAnalyzer(), true); @@ -134,21 
+137,21 @@ private Lucene.Net.Documents.Document GetDoc() { Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document(); - doc.Add(new Field("body", ((System.Int32) docCount).ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED)); + doc.Add(new Field("body", System.Convert.ToString(docCount), Field.Store.YES, Field.Index.UN_TOKENIZED)); doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.UN_TOKENIZED)); docCount++; return doc; } [Test] - public virtual void TestIndexWithThreads() + public virtual void TestIndexWithThreads() { _TestIndexInternal(0); _TestIndexInternal(10); _TestIndexInternal(50); } - private void _TestIndexInternal(int maxWait) + private void _TestIndexInternal(int maxWait) { bool create = true; //Directory rd = new RAMDirectory(); @@ -187,7 +190,7 @@ index.Close(); Assert.Fail(); } - catch (System.SystemException e) + catch (System.SystemException) { // expected exception } @@ -311,10 +314,10 @@ System.String delId = null; try { - delId = idStack[idStack.Count - 1] as System.String; - idStack.RemoveAt(idStack.Count - 1); + delId = idStack[idStack.Count - 1] as System.String; + idStack.RemoveAt(idStack.Count - 1); } - catch (System.ArgumentOutOfRangeException e) + catch (System.ArgumentOutOfRangeException) { continue; } @@ -352,12 +355,12 @@ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document(); lock (GetType()) { - doc.Add(new Field("id", ((System.Int32) id).ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED)); + doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED)); id++; } // add random stuff: - doc.Add(new Field("content", ((System.Int32) random.Next(1000)).ToString(), Field.Store.YES, Field.Index.TOKENIZED)); - doc.Add(new Field("content", ((System.Int32) random.Next(1000)).ToString(), Field.Store.YES, Field.Index.TOKENIZED)); + doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.TOKENIZED)); + doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.TOKENIZED)); doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.TOKENIZED)); return doc; }
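
For readers following these hunks, the change repeated across the test files above is: each fixture now derives from Lucene.Net.Util.LuceneTestCase, overrides SetUp() and calls base.SetUp() first, and builds its test segment with IndexWriter rather than the removed DocumentWriter; TestFieldsReader additionally gains TestLazyFieldsAfterClose, which asserts that a lazily loaded field throws AlreadyClosedException once its FieldsReader is closed. The sketch below restates that pattern in one self-contained fixture. It is illustrative only and not part of the commit: the fixture name FieldsReaderPatternTest, the field name "content", and the sample text are made up; the API calls are the 2.3-era Lucene.Net ones used in the diffs above.

using NUnit.Framework;
using Lucene.Net.Documents;
using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;

namespace Lucene.Net.Index
{
    [TestFixture]
    public class FieldsReaderPatternTest : LuceneTestCase   // hypothetical fixture mirroring TestFieldsReader above
    {
        private RAMDirectory dir = new RAMDirectory();
        private FieldInfos fieldInfos = null;
        private System.String segmentName = null;

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();                                    // new in this commit: run LuceneTestCase's setup first

            Document doc = new Document();
            doc.Add(new Field("content", "some stored text", Field.Store.YES, Field.Index.TOKENIZED));
            fieldInfos = new FieldInfos();
            fieldInfos.Add(doc);

            // DocumentWriter is gone; the new SetUp builds the segment with IndexWriter instead.
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
            writer.SetUseCompoundFile(false);                // keep the raw stored-field files visible to FieldsReader
            writer.AddDocument(doc);
            writer.Close();
            segmentName = writer.NewestSegment().name;       // same call order as the SetUp in the diff
        }

        [Test]
        public virtual void TestLazyFieldAfterClose()
        {
            FieldsReader reader = new FieldsReader(dir, segmentName, fieldInfos);

            // Mark "content" as lazy: its bytes are fetched only when the value is first requested.
            System.Collections.Hashtable lazy = new System.Collections.Hashtable();
            lazy.Add("content", "content");
            SetBasedFieldSelector selector = new SetBasedFieldSelector(new System.Collections.Hashtable(), lazy);

            Fieldable field = reader.Doc(0, selector).GetFieldable("content");
            Assert.IsTrue(field.IsLazy(), "field should have been loaded lazily");

            reader.Close();
            try
            {
                field.StringValue();                         // must fail: the backing reader is already closed
                Assert.Fail("expected AlreadyClosedException");
            }
            catch (AlreadyClosedException)
            {
                // expected
            }
        }
    }
}

Disabling the compound file in SetUp keeps the per-segment stored-field files directly accessible in the directory, which is presumably why the new TestFieldsReader.SetUp turns it off before handing the segment name to FieldsReader.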
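
The TestIndexInput assertions above follow directly from Lucene's VInt encoding: each byte carries seven low-order data bits, least-significant group first, and the high bit marks a continuation. So 0x80 0x01 decodes to 0 + (1 << 7) = 128, 0xFF 0x7F to 127 + (127 << 7) = 16383, 0x80 0x80 0x01 to 1 << 14 = 16384, and 0x81 0x80 0x01 to 16385; the 0x06 that follows is the character count of the string "Lucene". For reference, a stand-alone decoder with the same semantics as IndexInput.ReadVInt() (a hypothetical helper, not part of the commit):

public static class VIntExample
{
    // Decode one VInt starting at offset; offset advances past the bytes consumed.
    public static int ReadVInt(byte[] buf, ref int offset)
    {
        byte b = buf[offset++];
        int value = b & 0x7F;                 // low seven bits of the first byte
        for (int shift = 7; (b & 0x80) != 0; shift += 7)
        {
            b = buf[offset++];                // high bit set means another byte follows
            value |= (b & 0x7F) << shift;     // each later byte supplies the next seven bits
        }
        return value;
    }
}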