lucene-commits mailing list archives

From: mikemcc...@apache.org
Subject: svn commit: r1640053 [2/10] - in /lucene/dev/branches/lucene6005/lucene: core/src/java/org/apache/lucene/document/ core/src/java/org/apache/lucene/index/ core/src/test/org/apache/lucene/ core/src/test/org/apache/lucene/codecs/compressing/ core/src/test...
Date: Mon, 17 Nov 2014 00:43:47 GMT
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValues.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValues.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValues.java Mon Nov 17 00:43:44 2014
@@ -18,17 +18,17 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase.Monster;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.TimeUnits;
-import org.apache.lucene.util.LuceneTestCase.Monster;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 @SuppressCodecs({"SimpleText", "Memory", "Direct"})
@@ -54,11 +54,10 @@ public class Test2BSortedDocValues exten
         .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
         .setCodec(TestUtil.getDefaultCodec()));
 
-    Document doc = new Document();
+    Document2 doc = w.newDocument();
     byte bytes[] = new byte[2];
     BytesRef data = new BytesRef(bytes);
-    SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
-    doc.add(dvField);
+    doc.addBinary("dv", data);
     
     for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
       bytes[0] = (byte)(i >> 8);
@@ -110,11 +109,10 @@ public class Test2BSortedDocValues exten
         .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
         .setCodec(TestUtil.getDefaultCodec()));
 
-    Document doc = new Document();
+    Document2 doc = w.newDocument();
     byte bytes[] = new byte[4];
     BytesRef data = new BytesRef(bytes);
-    SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
-    doc.add(dvField);
+    doc.addBinary("dv", data);
     
     for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
       bytes[0] = (byte)(i >> 24);

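The recurring change in this commit: instead of building a Document and adding explicit Field subclasses, tests ask the writer for a schema-aware document whose typed add* calls imply the field's configuration. A minimal before/after sketch, assuming an open IndexWriter w and the branch-only Document2/addBinary API shown in the hunk above:

    // Before (trunk API): an explicit doc-values field is added to a Document.
    Document doc = new Document();
    byte[] bytes = new byte[2];
    doc.add(new SortedDocValuesField("dv", new BytesRef(bytes)));
    w.addDocument(doc);

    // After (lucene6005 branch): the writer creates the document, and the
    // typed add* call implies the field's schema (sorted doc values here).
    Document2 doc2 = w.newDocument();
    doc2.addBinary("dv", new BytesRef(bytes));
    w.addDocument(doc2);
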
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java Mon Nov 17 00:43:44 2014
@@ -57,18 +57,14 @@ public class Test4GBStoredFields extends
      ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024);
     }
 
-    final Document doc = new Document();
-    final FieldType ft = new FieldType();
-    ft.setStored(true);
-    ft.freeze();
+    final Document2 doc = w.newDocument();
     final int valueLength = RandomInts.randomIntBetween(random(), 1 << 13, 1 << 20);
     final byte[] value = new byte[valueLength];
     for (int i = 0; i < valueLength; ++i) {
       // random so that even compressing codecs can't compress it
       value[i] = (byte) random().nextInt(256);
     }
-    final Field f = new Field("fld", value, ft);
-    doc.add(f);
+    doc.addStored("fld", value);
 
     final int numDocs = (int) ((1L << 32) / valueLength + 100);
     for (int i = 0; i < numDocs; ++i) {

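Test4GBStoredFields applies the same idea to stored-only fields: the hand-built FieldType with setStored(true)/freeze() collapses into one addStored call. A before/after sketch, assuming the byte[] overload of addStored used above:

    // Before: a frozen, stored-only FieldType plus a raw Field.
    FieldType ft = new FieldType();
    ft.setStored(true);
    ft.freeze();
    doc.add(new Field("fld", value, ft));

    // After: "stored only" is implied by the method itself.
    Document2 doc2 = w.newDocument();
    doc2.addStored("fld", value);
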
Added: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java?rev=1640053&view=auto
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java (added)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java Mon Nov 17 00:43:44 2014
@@ -0,0 +1,523 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Document2;
+import org.apache.lucene.document.LowSchemaField;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+/** Holds test cases that make schema changes that are only "allowed" by the low schema. */
+
+public class TestAbuseSchema extends LuceneTestCase {
+
+  // LUCENE-1010
+  public void testNoTermVectorAfterTermVectorMerge() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig());
+    List<LowSchemaField> document = new ArrayList<>();
+    LowSchemaField field = new LowSchemaField("tvtest", "a b c", IndexOptions.DOCS, false);
+    field.enableTermVectors(false, false, false);
+    document.add(field);
+    iw.addDocument(document);
+    iw.commit();
+
+    document = new ArrayList<>();
+    document.add(new LowSchemaField("tvtest", "a b c", IndexOptions.DOCS, false));
+    iw.addDocument(document);
+    // Make first segment
+    iw.commit();
+
+    iw.forceMerge(1);
+
+    document = new ArrayList<>();
+    document.add(field);
+    iw.addDocument(document);
+    // Make 2nd segment
+    iw.commit();
+    iw.forceMerge(1);
+
+    iw.close();
+    dir.close();
+  }
+
+  /** 
+   * In a single doc, for the same field, mix the term vectors up 
+   */
+  public void testInconsistentTermVectorOptions() throws IOException {
+
+    LowSchemaField f1, f2;
+
+    // no vectors + vectors
+    f1 = new LowSchemaField("field", "value1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2 = new LowSchemaField("field", "value2", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2.enableTermVectors(false, false, false);
+    doTestMixup(f1, f2);
+    
+    // vectors + vectors with pos
+    f1 = new LowSchemaField("field", "value1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f1.enableTermVectors(false, false, false);
+    f2 = new LowSchemaField("field", "value2", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2.enableTermVectors(true, false, false);
+    doTestMixup(f1, f2);
+    
+    // vectors + vectors with off
+    f1 = new LowSchemaField("field", "value1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f1.enableTermVectors(false, false, false);
+    f2 = new LowSchemaField("field", "value2", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2.enableTermVectors(false, true, false);
+    doTestMixup(f1, f2);
+    
+    // vectors with pos + vectors with pos + off
+    f1 = new LowSchemaField("field", "value1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f1.enableTermVectors(true, false, false);
+    f2 = new LowSchemaField("field", "value2", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2.enableTermVectors(true, true, false);
+    doTestMixup(f1, f2);
+
+    // vectors with pos + vectors with pos + pay
+    f1 = new LowSchemaField("field", "value1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f1.enableTermVectors(true, false, false);
+    f2 = new LowSchemaField("field", "value2", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    f2.enableTermVectors(true, false, true);
+    doTestMixup(f1, f2);
+  }
+  
+  private void doTestMixup(LowSchemaField f1, LowSchemaField f2) throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    
+    // add 3 good docs
+    for (int i = 0; i < 3; i++) {
+      Document2 doc = iw.newDocument();
+      doc.addAtom("id", Integer.toString(i));
+      iw.addDocument(doc);
+    }
+
+    // add broken doc
+    List<LowSchemaField> doc = new ArrayList<>();
+    doc.add(f1);
+    doc.add(f2);
+    
+    // ensure broken doc hits exception
+    try {
+      iw.addDocument(doc);
+      fail("didn't hit expected exception");
+    } catch (IllegalArgumentException iae) {
+      assertNotNull(iae.getMessage());
+      assertTrue(iae.getMessage().startsWith("all instances of a given field name must have the same term vectors settings"));
+    }
+    
+    // ensure good docs are still ok
+    IndexReader ir = iw.getReader();
+    assertEquals(3, ir.numDocs());
+    
+    ir.close();
+    iw.close();
+    dir.close();
+  }
+
+  // LUCENE-5611: don't abort segment when term vector settings are wrong
+  public void testNoAbortOnBadTVSettings() throws Exception {
+    Directory dir = newDirectory();
+    // Don't use RandomIndexWriter because we want to be sure both docs go to 1 seg:
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter iw = new IndexWriter(dir, iwc);
+
+    List<LowSchemaField> doc = new ArrayList<>();
+    iw.addDocument(doc);
+    LowSchemaField field = new LowSchemaField("field", "value", IndexOptions.NONE, false);
+    field.enableTermVectors(false, false, false);
+    doc.add(field);
+    try {
+      iw.addDocument(doc);
+      fail("should have hit exc");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    IndexReader r = DirectoryReader.open(iw, true);
+
+    // Make sure the exc didn't lose our first document:
+    assertEquals(1, r.numDocs());
+    iw.close();
+    r.close();
+    dir.close();
+  }
+
+  public void testPostingsOffsetsWithUnindexedFields() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter riw = newRandomIndexWriter(dir);
+    for (int i = 0; i < 100; i++) {
+      // ensure at least one doc is indexed with offsets
+      LowSchemaField field;
+      if (i < 99 && random().nextInt(2) == 0) {
+        // stored only
+        field = new LowSchemaField("foo", "boo!", IndexOptions.NONE, false);
+      } else {
+        field = new LowSchemaField("foo", "boo!", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, true);
+        if (random().nextBoolean()) {
+          // store some term vectors for the checkindex cross-check
+          field.enableTermVectors(random().nextBoolean(), random().nextBoolean(), false);
+        }
+      }
+      riw.addDocument(Collections.singletonList(field));
+    }
+    CompositeReader ir = riw.getReader();
+    LeafReader slow = SlowCompositeReaderWrapper.wrap(ir);
+    FieldInfos fis = slow.getFieldInfos();
+    assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fis.fieldInfo("foo").getIndexOptions());
+    slow.close();
+    ir.close();
+    riw.close();
+    dir.close();
+  }
+  
+  /**
+   * Tests various combinations of omitNorms=true/false and of the field not existing at all,
+   * ensuring that only omitNorms is 'viral'.
+   * Internally checks that MultiNorms.norms() returns the same bytes as the
+   * fully merged equivalent.
+   */
+  public void testOmitNormsCombos() throws IOException {
+    // indexed with norms
+    LowSchemaField norms = new LowSchemaField("foo", "a", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+
+    // indexed without norms
+    LowSchemaField noNorms = new LowSchemaField("foo", "a", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    noNorms.disableNorms();
+
+    // not indexed, but stored
+    LowSchemaField noIndex = new LowSchemaField("foo", "a", IndexOptions.NONE, false);
+
+    // not indexed but stored, omitNorms is set
+    LowSchemaField noNormsNoIndex = new LowSchemaField("foo", "a", IndexOptions.NONE, false);
+    noNormsNoIndex.disableNorms();
+
+    // neither indexed nor stored (doesn't exist at all; we index a different field instead)
+    LowSchemaField emptyNorms = new LowSchemaField("bar", "a", IndexOptions.NONE, false);
+    
+    assertNotNull(getNorms("foo", norms, norms));
+    assertNull(getNorms("foo", norms, noNorms));
+    assertNotNull(getNorms("foo", norms, noIndex));
+    assertNotNull(getNorms("foo", norms, noNormsNoIndex));
+    assertNotNull(getNorms("foo", norms, emptyNorms));
+    assertNull(getNorms("foo", noNorms, noNorms));
+    assertNull(getNorms("foo", noNorms, noIndex));
+    assertNull(getNorms("foo", noNorms, noNormsNoIndex));
+    assertNull(getNorms("foo", noNorms, emptyNorms));
+    assertNull(getNorms("foo", noIndex, noIndex));
+    assertNull(getNorms("foo", noIndex, noNormsNoIndex));
+    assertNull(getNorms("foo", noIndex, emptyNorms));
+    assertNull(getNorms("foo", noNormsNoIndex, noNormsNoIndex));
+    assertNull(getNorms("foo", noNormsNoIndex, emptyNorms));
+    assertNull(getNorms("foo", emptyNorms, emptyNorms));
+  }
+
+  /**
+   * Indexes at least 1 document with f1, and at least 1 document with f2.
+   * Returns the norms for "field".
+   */
+  NumericDocValues getNorms(String field, LowSchemaField f1, LowSchemaField f2) throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
+                              .setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter riw = new RandomIndexWriter(random(), dir, iwc);
+    
+    // add f1
+    riw.addDocument(Collections.singletonList(f1));
+    
+    // add f2
+    riw.addDocument(Collections.singletonList(f2));
+    
+    // add a mix of f1's and f2's
+    int numExtraDocs = TestUtil.nextInt(random(), 1, 1000);
+    for (int i = 0; i < numExtraDocs; i++) {
+      riw.addDocument(Collections.singletonList(random().nextBoolean() ? f1 : f2));
+    }
+
+    IndexReader ir1 = riw.getReader();
+    // todo: generalize
+    NumericDocValues norms1 = MultiDocValues.getNormValues(ir1, field);
+    
+    // fully merge and validate MultiNorms against single segment.
+    riw.forceMerge(1);
+    DirectoryReader ir2 = riw.getReader();
+    NumericDocValues norms2 = getOnlySegmentReader(ir2).getNormValues(field);
+
+    if (norms1 == null) {
+      assertNull(norms2);
+    } else {
+      for(int docID=0;docID<ir1.maxDoc();docID++) {
+        assertEquals(norms1.get(docID), norms2.get(docID));
+      }
+    }
+    ir1.close();
+    ir2.close();
+    riw.close();
+    dir.close();
+    return norms1;
+  }
+
+  public void testSameFieldNameForPostingAndDocValue() throws Exception {
+    // LUCENE-5192: FieldInfos.Builder neglected to update
+    // globalFieldNumbers.docValuesType map if the field existed, resulting in
+    // potentially adding the same field with different DV types.
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    List<LowSchemaField> doc = new ArrayList<>();
+
+    LowSchemaField field = new LowSchemaField("f", "mock-value", IndexOptions.DOCS, false);
+    field.disableNorms();
+    field.doNotStore();
+    doc.add(field);
+
+    field = new LowSchemaField("f", 5, IndexOptions.NONE, false);
+    field.setDocValuesType(DocValuesType.NUMERIC);
+    doc.add(field);
+    writer.addDocument(doc);
+    writer.commit();
+    
+    doc = new ArrayList<>();
+    field = new LowSchemaField("f", new BytesRef("mock"), IndexOptions.NONE, false);
+    field.setDocValuesType(DocValuesType.BINARY);
+    doc.add(field);
+
+    try {
+      writer.addDocument(doc);
+      fail("should not have succeeded in adding a field with a different DV type than what already exists");
+    } catch (IllegalArgumentException e) {
+      writer.rollback();
+    }
+    
+    dir.close();
+  }
+
+  // LUCENE-6049
+  public void testExcIndexingDocBeforeDocValues() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter w = new IndexWriter(dir, iwc);
+    List<LowSchemaField> doc = new ArrayList<>();
+    LowSchemaField field = new LowSchemaField("test", "value", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+    field.setDocValuesType(DocValuesType.SORTED);
+    field.doNotStore();
+    field.setTokenStream(new TokenStream() {
+        @Override
+        public boolean incrementToken() {
+          throw new RuntimeException("no");
+        }
+      });
+    doc.add(field);
+    try {
+      w.addDocument(doc);
+      fail("did not hit exception");
+    } catch (RuntimeException re) {
+      // expected
+    }
+    w.addDocument(w.newDocument());
+    w.close();
+    dir.close();
+  }
+
+
+  public void testSameFieldNumbersAcrossSegments() throws Exception {
+    for (int i = 0; i < 2; i++) {
+      Directory dir = newDirectory();
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+                                                   .setMergePolicy(NoMergePolicy.INSTANCE));
+
+      List<LowSchemaField> d1 = new ArrayList<>();
+      d1.add(new LowSchemaField("f1", "first field", IndexOptions.DOCS, false));
+      d1.add(new LowSchemaField("f2", "second field", IndexOptions.DOCS, false));
+      writer.addDocument(d1);
+
+      if (i == 1) {
+        writer.close();
+        writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+                                         .setMergePolicy(NoMergePolicy.INSTANCE));
+      } else {
+        writer.commit();
+      }
+
+      List<LowSchemaField> d2 = new ArrayList<>();
+      d2.add(new LowSchemaField("f2", "second field", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+      LowSchemaField field = new LowSchemaField("f1", "first field", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+      field.enableTermVectors(false, false, false);
+      d2.add(field);
+      d2.add(new LowSchemaField("f3", "third field", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+      d2.add(new LowSchemaField("f4", "fourth field", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+      writer.addDocument(d2);
+
+      writer.close();
+
+      SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
+      assertEquals(2, sis.size());
+
+      FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
+      FieldInfos fis2 = IndexWriter.readFieldInfos(sis.info(1));
+
+      assertEquals("f1", fis1.fieldInfo(0).name);
+      assertEquals("f2", fis1.fieldInfo(1).name);
+      assertEquals("f1", fis2.fieldInfo(0).name);
+      assertEquals("f2", fis2.fieldInfo(1).name);
+      assertEquals("f3", fis2.fieldInfo(2).name);
+      assertEquals("f4", fis2.fieldInfo(3).name);
+
+      writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+      writer.forceMerge(1);
+      writer.close();
+
+      sis = SegmentInfos.readLatestCommit(dir);
+      assertEquals(1, sis.size());
+
+      FieldInfos fis3 = IndexWriter.readFieldInfos(sis.info(0));
+
+      assertEquals("f1", fis3.fieldInfo(0).name);
+      assertEquals("f2", fis3.fieldInfo(1).name);
+      assertEquals("f3", fis3.fieldInfo(2).name);
+      assertEquals("f4", fis3.fieldInfo(3).name);
+
+
+      dir.close();
+    }
+  }
+
+  public void testEnablingNorms() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+                                          .setMaxBufferedDocs(10));
+    // Enable norms for only 1 doc, pre flush
+    for(int j=0;j<10;j++) {
+      List<LowSchemaField> doc = new ArrayList<>();
+      LowSchemaField f;
+      if (j != 8) {
+        f = new LowSchemaField("field", "aaa", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        f.disableNorms();
+      } else {
+        f = new LowSchemaField("field", "aaa", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        f.doNotStore();
+      }
+      doc.add(f);
+      writer.addDocument(doc);
+    }
+    writer.close();
+
+    Term searchTerm = new Term("field", "aaa");
+
+    IndexReader reader = DirectoryReader.open(dir);
+    IndexSearcher searcher = newSearcher(reader);
+    ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
+    assertEquals(10, hits.length);
+    reader.close();
+
+    writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+                             .setOpenMode(IndexWriterConfig.OpenMode.CREATE).setMaxBufferedDocs(10));
+    // Enable norms for only 1 doc, post flush
+    for(int j=0;j<27;j++) {
+      List<LowSchemaField> doc = new ArrayList<>();
+      LowSchemaField f;
+      if (j != 26) {
+        f = new LowSchemaField("field", "aaa", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        f.disableNorms();
+      } else {
+        f = new LowSchemaField("field", "aaa", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        f.doNotStore();
+      }
+      doc.add(f);
+      writer.addDocument(doc);
+    }
+    writer.close();
+    reader = DirectoryReader.open(dir);
+    searcher = newSearcher(reader);
+    hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
+    assertEquals(27, hits.length);
+    reader.close();
+
+    reader = DirectoryReader.open(dir);
+    reader.close();
+
+    dir.close();
+  }
+
+  public void testVariableSchema() throws Exception {
+    Directory dir = newDirectory();
+    for(int i=0;i<20;i++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + i);
+      }
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+                                                  .setMaxBufferedDocs(2)
+                                                  .setMergePolicy(newLogMergePolicy()));
+      //LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
+      //lmp.setMergeFactor(2);
+      //lmp.setNoCFSRatio(0.0);
+      List<LowSchemaField> doc = new ArrayList<>();
+      String contents = "aa bb cc dd ee ff gg hh ii jj kk";
+
+      if (i == 7) {
+        // Add empty docs here
+        LowSchemaField field = new LowSchemaField("content3", "", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        field.doNotStore();
+        doc.add(field);
+      } else {
+        if (i%2 == 0) {
+          doc.add(new LowSchemaField("content4", contents, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+          doc.add(new LowSchemaField("content5", "", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+        } else {
+          LowSchemaField field = new LowSchemaField("content5", "", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+          field.doNotStore();
+          doc.add(field);
+        }
+        LowSchemaField field = new LowSchemaField("content1", contents, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true);
+        field.doNotStore();
+        doc.add(field);
+        doc.add(new LowSchemaField("content3", "", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, true));
+      }
+
+      for(int j=0;j<4;j++) {
+        writer.addDocument(doc);
+      }
+
+      writer.close();
+
+      if (0 == i % 4) {
+        writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+        //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
+        //lmp2.setNoCFSRatio(0.0);
+        writer.forceMerge(1);
+        writer.close();
+      }
+    }
+    dir.close();
+  }
+
+}

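TestAbuseSchema exercises the other half of the branch design: LowSchemaField bypasses the FieldTypes schema entirely, and a document is just a List<LowSchemaField> passed straight to IndexWriter.addDocument. A sketch of the idiom, assuming the LowSchemaField(name, value, indexOptions, tokenized) constructor and the per-field toggles used throughout the test:

    List<LowSchemaField> doc = new ArrayList<>();
    LowSchemaField field =
        new LowSchemaField("tvtest", "a b c", IndexOptions.DOCS, false);
    field.enableTermVectors(false, false, false); // positions, offsets, payloads
    field.disableNorms();                         // per-instance toggles, no FieldType
    doc.add(field);
    iw.addDocument(doc);                          // the writer accepts the raw list
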
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java Mon Nov 17 00:43:44 2014
@@ -971,8 +971,8 @@ public class TestAddIndexes extends Luce
       dirs[i] = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
       IndexWriter writer = new IndexWriter(dirs[i], conf);
-      Document doc = new Document();
-      doc.add(new StringField("id", "myid", Field.Store.NO));
+      Document2 doc = writer.newDocument();
+      doc.addAtom("id", "myid");
       writer.addDocument(doc);
       writer.close();
     }
@@ -1001,9 +1001,9 @@ public class TestAddIndexes extends Luce
   // just like addDocs but with ID, starting from docStart
   private void addDocsWithID(IndexWriter writer, int numDocs, int docStart) throws IOException {
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      doc.add(newTextField("content", "aaa", Field.Store.NO));
-      doc.add(newTextField("id", "" + (docStart + i), Field.Store.YES));
+      Document2 doc = writer.newDocument();
+      doc.addLargeText("content", "aaa");
+      doc.addLargeText("id", "" + (docStart + i));
       writer.addDocument(doc);
     }
   }
@@ -1093,10 +1093,10 @@ public class TestAddIndexes extends Luce
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = new RAMDirectory();
       IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(new MockAnalyzer(random())));
-      Document d = new Document();
-      FieldType customType = new FieldType(TextField.TYPE_STORED);
-      customType.setStoreTermVectors(true);
-      d.add(new Field("c", "v", customType));
+      Document2 d = w.newDocument();
+      FieldTypes fieldTypes = w.getFieldTypes();
+      fieldTypes.enableTermVectors("c");
+      d.addLargeText("c", "v");
       w.addDocument(d);
       w.close();
     }
@@ -1139,10 +1139,8 @@ public class TestAddIndexes extends Luce
       IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
       conf.setCodec(new UnRegisteredCodec());
       IndexWriter w = new IndexWriter(toAdd, conf);
-      Document doc = new Document();
-      FieldType customType = new FieldType();
-      customType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); 
-      doc.add(newField("foo", "bar", customType));
+      Document2 doc = w.newDocument();
+      doc.addLargeText("foo", "bar");
       w.addDocument(doc);
       w.close();
     }
@@ -1178,18 +1176,18 @@ public class TestAddIndexes extends Luce
   public void testFieldNamesChanged() throws IOException {
     Directory d1 = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d1);
-    Document doc = new Document();
-    doc.add(newStringField("f1", "doc1 field1", Field.Store.YES));
-    doc.add(newStringField("id", "1", Field.Store.YES));
+    Document2 doc = w.newDocument();
+    doc.addAtom("f1", "doc1 field1");
+    doc.addAtom("id", "1");
     w.addDocument(doc);
     IndexReader r1 = w.getReader();
     w.close();
 
     Directory d2 = newDirectory();
     w = new RandomIndexWriter(random(), d2);
-    doc = new Document();
-    doc.add(newStringField("f2", "doc2 field2", Field.Store.YES));
-    doc.add(newStringField("id", "2", Field.Store.YES));
+    doc = w.newDocument();
+    doc.addAtom("f2", "doc2 field2");
+    doc.addAtom("id", "2");
     w.addDocument(doc);
     IndexReader r2 = w.getReader();
     w.close();
@@ -1238,7 +1236,7 @@ public class TestAddIndexes extends Luce
   public void testFakeAllDeleted() throws Exception {
     Directory src = newDirectory(), dest = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), src);
-    w.addDocument(new Document());
+    w.addDocument(w.newDocument());
     IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
     w.close();
     
@@ -1260,7 +1258,7 @@ public class TestAddIndexes extends Luce
   public void testLocksBlock() throws Exception {
     Directory src = newDirectory();
     RandomIndexWriter w1 = new RandomIndexWriter(random(), src);
-    w1.addDocument(new Document());
+    w1.addDocument(w1.newDocument());
     w1.commit();
 
     Directory dest = newDirectory();

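Per-field options that used to live on a FieldType instance (term vectors here) now live in the writer's FieldTypes registry, set once before documents are added. A sketch of the term-vector hunk, assuming an open IndexWriter w:

    FieldTypes fieldTypes = w.getFieldTypes();
    fieldTypes.enableTermVectors("c");   // schema-level setting, not a per-Field flag
    Document2 d = w.newDocument();
    d.addLargeText("c", "v");            // vectors are recorded because of the schema
    w.addDocument(d);
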
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java Mon Nov 17 00:43:44 2014
@@ -21,6 +21,7 @@ import java.io.IOException;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -38,17 +39,12 @@ public class TestAllFilesHaveChecksumFoo
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setCodec(TestUtil.getDefaultCodec());
     RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
-    Document doc = new Document();
-    // these fields should sometimes get term vectors, etc
-    Field idField = newStringField("id", "", Field.Store.NO);
-    Field bodyField = newTextField("body", "", Field.Store.NO);
-    Field dvField = new NumericDocValuesField("dv", 5);
-    doc.add(idField);
-    doc.add(bodyField);
-    doc.add(dvField);
     for (int i = 0; i < 100; i++) {
-      idField.setStringValue(Integer.toString(i));
-      bodyField.setStringValue(TestUtil.randomUnicodeString(random()));
+      Document2 doc = riw.newDocument();
+      doc.addUniqueInt("id", i);
+      // these fields should sometimes get term vectors, etc
+      doc.addLargeText("body", TestUtil.randomUnicodeString(random()));
+      doc.addInt("dv", 5);
       riw.addDocument(doc);
       if (random().nextInt(7) == 0) {
         riw.commit();

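Two of the typed helpers appear side by side here: addUniqueInt for the primary-key-style id (which presumably marks the field as a unique key, enabling the newIntTerm lookups seen elsewhere in this commit) and addInt replacing NumericDocValuesField. A compact sketch of the new loop body, method names as in the hunk:

    for (int i = 0; i < 100; i++) {
      Document2 doc = riw.newDocument();
      doc.addUniqueInt("id", i);        // was newStringField("id", ...)
      doc.addLargeText("body", TestUtil.randomUnicodeString(random()));
      doc.addInt("dv", 5);              // was new NumericDocValuesField("dv", 5)
      riw.addDocument(doc);
    }
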
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java Mon Nov 17 00:43:44 2014
@@ -23,9 +23,11 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.store.Directory;
@@ -43,23 +45,16 @@ public class TestAllFilesHaveCodecHeader
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setCodec(TestUtil.getDefaultCodec());
     RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
-    Document doc = new Document();
-    Field idField = newStringField("id", "", Field.Store.YES);
-    Field bodyField = newTextField("body", "", Field.Store.YES);
-    FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
-    vectorsType.setStoreTermVectors(true);
-    vectorsType.setStoreTermVectorPositions(true);
-    Field vectorsField = new Field("vectors", "", vectorsType);
-    Field dvField = new NumericDocValuesField("dv", 5);
-    doc.add(idField);
-    doc.add(bodyField);
-    doc.add(vectorsField);
-    doc.add(dvField);
+    FieldTypes fieldTypes = riw.getFieldTypes();
+    fieldTypes.enableTermVectors("vectors");
+    fieldTypes.enableTermVectorPositions("vectors");
+
     for (int i = 0; i < 100; i++) {
-      idField.setStringValue(Integer.toString(i));
-      bodyField.setStringValue(TestUtil.randomUnicodeString(random()));
-      dvField.setLongValue(random().nextInt(5));
-      vectorsField.setStringValue(TestUtil.randomUnicodeString(random()));
+      Document2 doc = riw.newDocument();
+      doc.addInt("id", i);
+      doc.addLargeText("body", TestUtil.randomUnicodeString(random()));
+      doc.addLargeText("vectors", TestUtil.randomUnicodeString(random()));
+      doc.addInt("dv", random().nextInt(5));
       riw.addDocument(doc);
       if (random().nextInt(7) == 0) {
         riw.commit();

Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java Mon Nov 17 00:43:44 2014
@@ -75,12 +75,14 @@ public class TestAtomicUpdate extends Lu
 
     @Override
     public void doWork() throws Exception {
+      FieldTypes fieldTypes = writer.getFieldTypes();
+
       // Update all 100 docs...
       for(int i=0; i<100; i++) {
-        Document d = new Document();
-        d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
-        d.add(new TextField("contents", English.intToEnglish(i+10*count), Field.Store.NO));
-        writer.updateDocument(new Term("id", Integer.toString(i)), d);
+        Document2 d = writer.newDocument();
+        d.addUniqueInt("id", i);
+        d.addLargeText("contents", English.intToEnglish(i+10*count));
+        writer.updateDocument(fieldTypes.newIntTerm("id", i), d);
       }
     }
   }
@@ -116,9 +118,9 @@ public class TestAtomicUpdate extends Lu
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {
-      Document d = new Document();
-      d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
-      d.add(newTextField("contents", English.intToEnglish(i), Field.Store.NO));
+      Document2 d = writer.newDocument();
+      d.addUniqueInt("id", i);
+      d.addLargeText("contents", English.intToEnglish(i));
       if ((i-1)%7 == 0) {
         writer.commit();
       }

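Because "id" is now indexed as an int via addUniqueInt, update and delete terms can no longer be spelled new Term("id", Integer.toString(i)); the writer's FieldTypes builds a term with the matching numeric encoding. A sketch of the update loop, assuming newIntTerm behaves as the hunk suggests:

    FieldTypes fieldTypes = writer.getFieldTypes();
    for (int i = 0; i < 100; i++) {
      Document2 d = writer.newDocument();
      d.addUniqueInt("id", i);
      d.addLargeText("contents", English.intToEnglish(i));
      // The term bytes must match the encoding addUniqueInt produced:
      writer.updateDocument(fieldTypes.newIntTerm("id", i), d);
    }
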
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBagOfPostings.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBagOfPostings.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBagOfPostings.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBagOfPostings.java Mon Nov 17 00:43:44 2014
@@ -26,6 +26,7 @@ import java.util.concurrent.ConcurrentLi
 import java.util.concurrent.CountDownLatch;
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
@@ -86,9 +87,6 @@ public class TestBagOfPostings extends L
           @Override
           public void run() {
             try {
-              Document document = new Document();
-              Field field = newTextField("field", "", Field.Store.NO);
-              document.add(field);
               startingGun.await();
               while (!postings.isEmpty()) {
                 StringBuilder text = new StringBuilder();
@@ -107,7 +105,8 @@ public class TestBagOfPostings extends L
                   text.append(token);
                   visited.add(token);
                 }
-                field.setStringValue(text.toString());
+                Document2 document = iw.newDocument();
+                document.addLargeText("field", text.toString());
                 iw.addDocument(document);
               }
             } catch (Exception e) {

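This hunk also retires the old reuse idiom: trunk created one Document and one mutable Field outside the loop and called setStringValue per iteration, whereas a Document2 is writer-owned and is built fresh each time. A before/after sketch of the worker body, where text stands in for the StringBuilder result assembled from the postings queue:

    // Before: shared instances, mutated on every iteration.
    Document document = new Document();
    Field field = newTextField("field", "", Field.Store.NO);
    document.add(field);
    while (!postings.isEmpty()) {
      field.setStringValue(text);
      iw.addDocument(document);
    }

    // After: a fresh writer-created document per iteration.
    while (!postings.isEmpty()) {
      Document2 document = iw.newDocument();
      document.addLargeText("field", text);
      iw.addDocument(document);
    }
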
Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java Mon Nov 17 00:43:44 2014
@@ -13,8 +13,10 @@ import org.apache.lucene.codecs.DocValue
 import org.apache.lucene.codecs.asserting.AssertingCodec;
 import org.apache.lucene.codecs.asserting.AssertingDocValuesFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
@@ -27,8 +29,8 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
+import org.junit.Ignore;
 import org.junit.Test;
-
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
 /*
@@ -75,10 +77,10 @@ public class TestBinaryDocValuesUpdates 
     return bytes;
   }
   
-  private Document doc(int id) {
-    Document doc = new Document();
-    doc.add(new StringField("id", "doc-" + id, Store.NO));
-    doc.add(new BinaryDocValuesField("val", toBytes(id + 1)));
+  private Document2 doc(IndexWriter w, int id) {
+    Document2 doc = w.newDocument();
+    doc.addAtom("id", "doc-" + id);
+    doc.addBinary("val", toBytes(id + 1));
     return doc;
   }
   
@@ -86,9 +88,11 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
                                                 .setRAMBufferSizeMB(0.00000001));
-    writer.addDocument(doc(0)); // val=1
-    writer.addDocument(doc(1)); // val=2
-    writer.addDocument(doc(3)); // val=4
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
+    writer.addDocument(doc(writer, 0)); // val=1
+    writer.addDocument(doc(writer, 1)); // val=2
+    writer.addDocument(doc(writer, 3)); // val=4
     writer.commit();
     assertEquals(2, writer.getFlushDeletesCount());
     writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(5));
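
The disableSorting calls threaded through the rest of this file suggest that on this branch addBinary defaults to sorted doc values, so tests wanting plain BINARY doc values opt out per field before indexing anything; that default-to-sorted reading is an inference from the hunks, not something the diff states. A sketch of the setup, where toBytes is the helper defined earlier in this test:

    FieldTypes fieldTypes = writer.getFieldTypes();
    fieldTypes.disableSorting("val");    // keep "val" as BINARY, not SORTED
    Document2 doc = writer.newDocument();
    doc.addAtom("id", "doc-0");          // atom: indexed as a single untokenized term
    doc.addBinary("val", toBytes(1L));
    writer.addDocument(doc);
    writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(5L));
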
@@ -111,8 +115,10 @@ public class TestBinaryDocValuesUpdates 
     conf.setMaxBufferedDocs(10);
     conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     IndexWriter writer = new IndexWriter(dir, conf);
-    writer.addDocument(doc(0)); // val=1
-    writer.addDocument(doc(1)); // val=2
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
+    writer.addDocument(doc(writer, 0)); // val=1
+    writer.addDocument(doc(writer, 1)); // val=2
     if (random().nextBoolean()) { // randomly commit before the update is sent
       writer.commit();
     }
@@ -143,10 +149,12 @@ public class TestBinaryDocValuesUpdates 
     conf.setMaxBufferedDocs(2); // generate few segments
     conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
     int numDocs = 10;
     long[] expectedValues = new long[numDocs];
     for (int i = 0; i < numDocs; i++) {
-      writer.addDocument(doc(i));
+      writer.addDocument(doc(writer, i));
       expectedValues[i] = i + 1;
     }
     writer.commit();
@@ -188,8 +196,11 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
-    writer.addDocument(doc(0));
-    writer.addDocument(doc(1));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
+
+    writer.addDocument(doc(writer, 0));
+    writer.addDocument(doc(writer, 1));
     
     final boolean isNRT = random().nextBoolean();
     final DirectoryReader reader1;
@@ -228,9 +239,11 @@ public class TestBinaryDocValuesUpdates 
     conf.setMaxBufferedDocs(10); // control segment flushing
     conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
+
     for (int i = 0; i < 6; i++) {
-      writer.addDocument(doc(i));
+      writer.addDocument(doc(writer, i));
       if (i % 2 == 1) {
         writer.commit(); // create 2-docs segments
       }
@@ -276,9 +289,11 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // control segment flushing
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
     
-    writer.addDocument(doc(0));
-    writer.addDocument(doc(1));
+    writer.addDocument(doc(writer, 0));
+    writer.addDocument(doc(writer, 1));
     
     if (random().nextBoolean()) {
       writer.commit();
@@ -310,9 +325,11 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // control segment flushing
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
     
-    writer.addDocument(doc(0));
-    writer.addDocument(doc(1));
+    writer.addDocument(doc(writer, 0));
+    writer.addDocument(doc(writer, 1));
     
     if (random().nextBoolean()) {
       writer.commit();
@@ -343,15 +360,18 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // prevent merges
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("ssdv");
+    fieldTypes.disableSorting("bdv");
+
     for (int i = 0; i < 4; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("dvUpdateKey", "dv", Store.NO));
-      doc.add(new NumericDocValuesField("ndv", i));
-      doc.add(new BinaryDocValuesField("bdv", new BytesRef(Integer.toString(i))));
-      doc.add(new SortedDocValuesField("sdv", new BytesRef(Integer.toString(i))));
-      doc.add(new SortedSetDocValuesField("ssdv", new BytesRef(Integer.toString(i))));
-      doc.add(new SortedSetDocValuesField("ssdv", new BytesRef(Integer.toString(i * 2))));
+      Document2 doc = writer.newDocument();
+      doc.addAtom("dvUpdateKey", "dv");
+      doc.addInt("ndv", i);
+      doc.addBinary("bdv", new BytesRef(Integer.toString(i)));
+      doc.addShortText("sdv", Integer.toString(i));
+      doc.addShortText("ssdv", Integer.toString(i));
+      doc.addShortText("ssdv", Integer.toString(i * 2));
       writer.addDocument(doc);
     }
     writer.commit();
@@ -392,12 +412,15 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // prevent merges
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv1");
+    fieldTypes.disableSorting("bdv2");
     
     for (int i = 0; i < 2; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("dvUpdateKey", "dv", Store.NO));
-      doc.add(new BinaryDocValuesField("bdv1", toBytes(i)));
-      doc.add(new BinaryDocValuesField("bdv2", toBytes(i)));
+      Document2 doc = writer.newDocument();
+      doc.addAtom("dvUpdateKey", "dv");
+      doc.addBinary("bdv1", toBytes(i));
+      doc.addBinary("bdv2", toBytes(i));
       writer.addDocument(doc);
     }
     writer.commit();
@@ -424,12 +447,14 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+
     for (int i = 0; i < 2; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("dvUpdateKey", "dv", Store.NO));
+      Document2 doc = writer.newDocument();
+      doc.addAtom("dvUpdateKey", "dv");
       if (i == 0) { // index only one document with value
-        doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
+        doc.addBinary("bdv", toBytes(5L));
       }
       writer.addDocument(doc);
     }
@@ -457,9 +482,9 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
     
-    Document doc = new Document();
-    doc.add(new StringField("key", "doc", Store.NO));
-    doc.add(new StringField("foo", "bar", Store.NO));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("key", "doc");
+    doc.addAtom("foo", "bar");
     writer.addDocument(doc); // flushed document
     writer.commit();
     writer.addDocument(doc); // in-memory document
@@ -494,11 +519,13 @@ public class TestBinaryDocValuesUpdates 
       }
     });
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
     
-    Document doc = new Document();
-    doc.add(new StringField("key", "doc", Store.NO));
-    doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
-    doc.add(new SortedDocValuesField("sorted", new BytesRef("value")));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("key", "doc");
+    doc.addBinary("bdv", toBytes(5L));
+    doc.addShortText("sorted", "value");
     writer.addDocument(doc); // flushed document
     writer.commit();
     writer.addDocument(doc); // in-memory document
@@ -525,10 +552,12 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
     
-    Document doc = new Document();
-    doc.add(new StringField("key", "doc", Store.NO));
-    doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("key", "doc");
+    doc.addBinary("bdv", toBytes(5L));
     writer.addDocument(doc); // flushed document
     writer.commit();
     writer.addDocument(doc); // in-memory document
@@ -552,17 +581,18 @@ public class TestBinaryDocValuesUpdates 
     Random random = random();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
     
     int docid = 0;
     int numRounds = atLeast(10);
     for (int rnd = 0; rnd < numRounds; rnd++) {
-      Document doc = new Document();
-      doc.add(new StringField("key", "doc", Store.NO));
-      doc.add(new BinaryDocValuesField("bdv", toBytes(-1)));
       int numDocs = atLeast(30);
       for (int i = 0; i < numDocs; i++) {
-        doc.removeField("id");
-        doc.add(new StringField("id", Integer.toString(docid++), Store.NO));
+        Document2 doc = writer.newDocument();
+        doc.addAtom("key", "doc");
+        doc.addBinary("bdv", toBytes(-1));
+        doc.addUniqueInt("id", docid++);
         writer.addDocument(doc);
       }
       
@@ -570,7 +600,7 @@ public class TestBinaryDocValuesUpdates 
       writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(value));
       
       if (random.nextDouble() < 0.2) { // randomly delete some docs
-        writer.deleteDocuments(new Term("id", Integer.toString(random.nextInt(docid))));
+        writer.deleteDocuments(fieldTypes.newIntTerm("id", random.nextInt(docid)));
       }
       
       // randomly commit or reopen-IW (or nothing), before forceMerge
@@ -588,10 +618,10 @@ public class TestBinaryDocValuesUpdates 
       // forceMerge is called, the index will be with one segment and deletes
       // and some MPs might now merge it, thereby invalidating test's
       // assumption that the reader has no deletes).
-      doc = new Document();
-      doc.add(new StringField("id", Integer.toString(docid++), Store.NO));
-      doc.add(new StringField("key", "doc", Store.NO));
-      doc.add(new BinaryDocValuesField("bdv", toBytes(value)));
+      Document2 doc = writer.newDocument();
+      doc.addUniqueInt("id", docid++);
+      doc.addAtom("key", "doc");
+      doc.addBinary("bdv", toBytes(value));
       writer.addDocument(doc);
 
       writer.forceMerge(1, true);
@@ -623,11 +653,13 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
     
-    Document doc = new Document();
-    doc.add(new StringField("k1", "v1", Store.NO));
-    doc.add(new StringField("k2", "v2", Store.NO));
-    doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("k1", "v1");
+    doc.addAtom("k2", "v2");
+    doc.addBinary("bdv", toBytes(5L));
     writer.addDocument(doc); // flushed document
     writer.commit();
     writer.addDocument(doc); // in-memory document
@@ -654,6 +686,7 @@ public class TestBinaryDocValuesUpdates 
     lmp.setMergeFactor(3); // merge often
     conf.setMergePolicy(lmp);
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
     
     final boolean isNRT = random.nextBoolean();
     DirectoryReader reader;
@@ -668,6 +701,7 @@ public class TestBinaryDocValuesUpdates 
     final long[] fieldValues = new long[numFields];
     for (int i = 0; i < fieldValues.length; i++) {
       fieldValues[i] = 1;
+      fieldTypes.disableSorting("f" + i);
     }
     
     int numRounds = atLeast(15);
@@ -676,12 +710,12 @@ public class TestBinaryDocValuesUpdates 
       int numDocs = atLeast(5);
 //      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
       for (int j = 0; j < numDocs; j++) {
-        Document doc = new Document();
-        doc.add(new StringField("id", "doc-" + docID, Store.NO));
-        doc.add(new StringField("key", "all", Store.NO)); // update key
+        Document2 doc = writer.newDocument();
+        doc.addAtom("id", "doc-" + docID);
+        doc.addAtom("key", "all"); // update key
         // add all fields with their current value
         for (int f = 0; f < fieldValues.length; f++) {
-          doc.add(new BinaryDocValuesField("f" + f, toBytes(fieldValues[f])));
+          doc.addBinary("f" + f, toBytes(fieldValues[f]));
         }
         writer.addDocument(doc);
         ++docID;
@@ -735,7 +769,9 @@ public class TestBinaryDocValuesUpdates 
     writer.close();
     IOUtils.close(reader, dir);
   }
-  
+
+  // nocommit fixme LUCENE-6062
+  @Ignore
   public void testUpdateSegmentWithNoDocValues() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -744,23 +780,25 @@ public class TestBinaryDocValuesUpdates 
     // legit.
     conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+
     // first segment with BDV
-    Document doc = new Document();
-    doc.add(new StringField("id", "doc0", Store.NO));
-    doc.add(new BinaryDocValuesField("bdv", toBytes(3L)));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "doc0");
+    doc.addBinary("bdv", toBytes(3L));
     writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new StringField("id", "doc4", Store.NO)); // document without 'bdv' field
+    doc = writer.newDocument();
+    doc.addAtom("id", "doc4"); // document without 'bdv' field
     writer.addDocument(doc);
     writer.commit();
     
     // second segment with no BDV
-    doc = new Document();
-    doc.add(new StringField("id", "doc1", Store.NO));
+    doc = writer.newDocument();
+    doc.addAtom("id", "doc1");
     writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new StringField("id", "doc2", Store.NO)); // document that isn't updated
+    doc = writer.newDocument();
+    doc.addAtom("id", "doc2"); // document that isn't updated
     writer.addDocument(doc);
     writer.commit();
     
@@ -789,7 +827,9 @@ public class TestBinaryDocValuesUpdates 
 
     dir.close();
   }
-  
+
+  // nocommit fixme LUCENE-6062
+  @Ignore 
   public void testUpdateSegmentWithPostingButNoDocValues() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -798,19 +838,21 @@ public class TestBinaryDocValuesUpdates 
     // legit.
     conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+
     // first segment with BDV
-    Document doc = new Document();
-    doc.add(new StringField("id", "doc0", Store.NO));
-    doc.add(new StringField("bdv", "mock-value", Store.NO));
-    doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "doc0");
+    doc.addAtom("bdvmock", "mock-value");
+    doc.addBinary("bdv", toBytes(5L));
     writer.addDocument(doc);
     writer.commit();
     
     // second segment with no BDV
-    doc = new Document();
-    doc.add(new StringField("id", "doc1", Store.NO));
-    doc.add(new StringField("bdv", "mock-value", Store.NO));
+    doc = writer.newDocument();
+    doc.addAtom("id", "doc1");
+    doc.addAtom("bdvmock", "mock-value");
     writer.addDocument(doc);
     writer.commit();
     
@@ -837,13 +879,16 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f");
     
-    Document doc = new Document();
-    doc.add(new StringField("f", "mock-value", Store.NO));
-    doc.add(new BinaryDocValuesField("f", toBytes(5L)));
+    // nocommit use low schema API here:
+    Document2 doc = writer.newDocument();
+    doc.addAtom("fmock", "mock-value");
+    doc.addBinary("f", toBytes(5L));
     writer.addDocument(doc);
     writer.commit();
-    writer.updateBinaryDocValue(new Term("f", "mock-value"), "f", toBytes(17L));
+    writer.updateBinaryDocValue(new Term("fmock", "mock-value"), "f", toBytes(17L));
     writer.close();
     
     DirectoryReader r = DirectoryReader.open(dir);
@@ -859,23 +904,33 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     final IndexWriter writer = new IndexWriter(dir, conf);
     
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+    fieldTypes.disableSorting("control");
+
     // create index
     final int numFields = TestUtil.nextInt(random(), 1, 4);
+
+    for(int i=0;i<numFields;i++) {
+      fieldTypes.disableSorting("f" + i);
+      fieldTypes.disableSorting("cf" + i);
+    }
+
     final int numDocs = atLeast(2000);
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("id", "doc" + i, Store.NO));
+      Document2 doc = writer.newDocument();
+      doc.addUniqueAtom("id", "doc" + i);
       double group = random().nextDouble();
       String g;
       if (group < 0.1) g = "g0";
       else if (group < 0.5) g = "g1";
       else if (group < 0.8) g = "g2";
       else g = "g3";
-      doc.add(new StringField("updKey", g, Store.NO));
+      doc.addAtom("updKey", g);
       for (int j = 0; j < numFields; j++) {
         long value = random().nextInt();
-        doc.add(new BinaryDocValuesField("f" + j, toBytes(value)));
-        doc.add(new BinaryDocValuesField("cf" + j, toBytes(value * 2))); // control, always updated to f * 2
+        doc.addBinary("f" + j, toBytes(value));
+        doc.addBinary("cf" + j, toBytes(value * 2)); // control, always updated to f * 2
       }
       writer.addDocument(doc);
     }
@@ -907,7 +962,11 @@ public class TestBinaryDocValuesUpdates 
               final String cf = "cf" + field;
 //              System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t + " field=" + field);
               long updValue = random.nextInt();
-              writer.updateDocValues(t, new BinaryDocValuesField(f, toBytes(updValue)), new BinaryDocValuesField(cf, toBytes(updValue*2)));
+              Document2 update = writer.newDocument();
+              update.disableExistsField();
+              update.addBinary(f, toBytes(updValue));
+              update.addBinary(cf, toBytes(updValue*2));
+              writer.updateDocValues(t, update);
               
               if (random.nextDouble() < 0.2) {
                 // delete a random document
@@ -988,13 +1047,16 @@ public class TestBinaryDocValuesUpdates 
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(4);
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f");
+    fieldTypes.disableSorting("cf");
     final int numDocs = atLeast(10);
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("id", "doc" + i, Store.NO));
+      Document2 doc = writer.newDocument();
+      doc.addUniqueAtom("id", "doc" + i);
       long value = random().nextInt();
-      doc.add(new BinaryDocValuesField("f", toBytes(value)));
-      doc.add(new BinaryDocValuesField("cf", toBytes(value * 2)));
+      doc.addBinary("f", toBytes(value));
+      doc.addBinary("cf", toBytes(value * 2));
       writer.addDocument(doc);
     }
     
@@ -1003,7 +1065,11 @@ public class TestBinaryDocValuesUpdates 
       int doc = random().nextInt(numDocs);
       Term t = new Term("id", "doc" + doc);
       long value = random().nextLong();
-      writer.updateDocValues(t, new BinaryDocValuesField("f", toBytes(value)), new BinaryDocValuesField("cf", toBytes(value*2)));
+      Document2 update = writer.newDocument();
+      update.disableExistsField();
+      update.addBinary("f", toBytes(value));
+      update.addBinary("cf", toBytes(value*2));
+      writer.updateDocValues(t, update);
       DirectoryReader reader = DirectoryReader.open(writer, true);
       for (LeafReaderContext context : reader.leaves()) {
         LeafReader r = context.reader();
@@ -1030,10 +1096,13 @@ public class TestBinaryDocValuesUpdates 
       }
     });
     IndexWriter writer = new IndexWriter(dir, conf);
-    Document doc = new Document();
-    doc.add(new StringField("id", "d0", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(5L)));
-    doc.add(new BinaryDocValuesField("f2", toBytes(13L)));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f1");
+    fieldTypes.disableSorting("f2");
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "d0");
+    doc.addBinary("f1", toBytes(5L));
+    doc.addBinary("f2", toBytes(13L));
     writer.addDocument(doc);
     writer.close();
     
@@ -1047,10 +1116,10 @@ public class TestBinaryDocValuesUpdates 
       }
     });
     writer = new IndexWriter(dir, conf);
-    doc = new Document();
-    doc.add(new StringField("id", "d1", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(17L)));
-    doc.add(new BinaryDocValuesField("f2", toBytes(2L)));
+    doc = writer.newDocument();
+    doc.addAtom("id", "d1");
+    doc.addBinary("f1", toBytes(17L));
+    doc.addBinary("f2", toBytes(2L));
     writer.addDocument(doc);
     writer.updateBinaryDocValue(new Term("id", "d0"), "f1", toBytes(12L));
     writer.close();
@@ -1071,6 +1140,9 @@ public class TestBinaryDocValuesUpdates 
     Directory dir1 = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir1, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+    fieldTypes.disableSorting("control");
     
     final int numDocs = atLeast(50);
     final int numTerms = TestUtil.nextInt(random(), 1, numDocs / 5);
@@ -1081,10 +1153,10 @@ public class TestBinaryDocValuesUpdates 
 
     // create first index
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("id", RandomPicks.randomFrom(random(), randomTerms), Store.NO));
-      doc.add(new BinaryDocValuesField("bdv", toBytes(4L)));
-      doc.add(new BinaryDocValuesField("control", toBytes(8L)));
+      Document2 doc = writer.newDocument();
+      doc.addAtom("id", RandomPicks.randomFrom(random(), randomTerms));
+      doc.addBinary("bdv", toBytes(4L));
+      doc.addBinary("control", toBytes(8L));
       writer.addDocument(doc);
     }
     
@@ -1095,7 +1167,11 @@ public class TestBinaryDocValuesUpdates 
     // update some docs to a random value
     long value = random().nextInt();
     Term term = new Term("id", RandomPicks.randomFrom(random(), randomTerms));
-    writer.updateDocValues(term, new BinaryDocValuesField("bdv", toBytes(value)), new BinaryDocValuesField("control", toBytes(value * 2)));
+    Document2 update = writer.newDocument();
+    update.disableExistsField();
+    update.addBinary("bdv", toBytes(value));
+    update.addBinary("control", toBytes(value*2));
+    writer.updateDocValues(term, update);
     writer.close();
     
     Directory dir2 = newDirectory();
@@ -1132,11 +1208,14 @@ public class TestBinaryDocValuesUpdates 
     }
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
-    
-    Document doc = new Document();
-    doc.add(new StringField("id", "d0", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
-    doc.add(new BinaryDocValuesField("f2", toBytes(1L)));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f1");
+    fieldTypes.disableSorting("f2");
+    
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "d0");
+    doc.addBinary("f1", toBytes(1L));
+    doc.addBinary("f2", toBytes(1L));
     writer.addDocument(doc);
 
     // update each field twice to make sure all unneeded files are deleted
@@ -1165,7 +1244,9 @@ public class TestBinaryDocValuesUpdates 
     conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
     conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
     IndexWriter writer = new IndexWriter(dir, conf);
-    
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("upd");
+
     // test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
     final int numDocs = atLeast(20000);
     final int numBinaryFields = atLeast(5);
@@ -1175,19 +1256,24 @@ public class TestBinaryDocValuesUpdates 
       updateTerms.add(TestUtil.randomSimpleString(random));
     }
 
+    for(int i=0;i<numBinaryFields;i++) {
+      fieldTypes.disableSorting("f" + i);
+      fieldTypes.disableSorting("cf" + i);
+    }
+
 //    System.out.println("numDocs=" + numDocs + " numBinaryFields=" + numBinaryFields + " numTerms=" + numTerms);
     
     // build a large index with many BDV fields and update terms
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
+      Document2 doc = writer.newDocument();
       int numUpdateTerms = TestUtil.nextInt(random, 1, numTerms / 10);
       for (int j = 0; j < numUpdateTerms; j++) {
-        doc.add(new StringField("upd", RandomPicks.randomFrom(random, updateTerms), Store.NO));
+        doc.addAtom("upd", RandomPicks.randomFrom(random, updateTerms));
       }
       for (int j = 0; j < numBinaryFields; j++) {
         long val = random.nextInt();
-        doc.add(new BinaryDocValuesField("f" + j, toBytes(val)));
-        doc.add(new BinaryDocValuesField("cf" + j, toBytes(val * 2)));
+        doc.addBinary("f" + j, toBytes(val));
+        doc.addBinary("cf" + j, toBytes(val * 2));
       }
       writer.addDocument(doc);
     }
@@ -1203,8 +1289,11 @@ public class TestBinaryDocValuesUpdates 
       int field = random.nextInt(numBinaryFields);
       Term updateTerm = new Term("upd", RandomPicks.randomFrom(random, updateTerms));
       long value = random.nextInt();
-      writer.updateDocValues(updateTerm, new BinaryDocValuesField("f" + field, toBytes(value)), 
-          new BinaryDocValuesField("cf" + field, toBytes(value * 2)));
+      Document2 update = writer.newDocument();
+      update.disableExistsField();
+      update.addBinary("f" + field, toBytes(value));
+      update.addBinary("cf" + field, toBytes(value*2));
+      writer.updateDocValues(updateTerm, update);
     }
 
     writer.close();
@@ -1229,12 +1318,16 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
-    
-    Document doc = new Document();
-    doc.add(new StringField("upd", "t1", Store.NO));
-    doc.add(new StringField("upd", "t2", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
-    doc.add(new BinaryDocValuesField("f2", toBytes(1L)));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("upd");
+    fieldTypes.disableSorting("f1");
+    fieldTypes.disableSorting("f2");
+    
+    Document2 doc = writer.newDocument();
+    doc.addAtom("upd", "t1");
+    doc.addAtom("upd", "t2");
+    doc.addBinary("f1", toBytes(1L));
+    doc.addBinary("f2", toBytes(1L));
     writer.addDocument(doc);
     writer.updateBinaryDocValue(new Term("upd", "t1"), "f1", toBytes(2L)); // update f1 to 2
     writer.updateBinaryDocValue(new Term("upd", "t1"), "f2", toBytes(2L)); // update f2 to 2
@@ -1255,10 +1348,12 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
-    
-    Document doc = new Document();
-    doc.add(new StringField("id", "doc", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f1");
+
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "doc");
+    doc.addBinary("f1", toBytes(1L));
     writer.addDocument(doc);
     writer.addDocument(doc);
     writer.commit();
@@ -1279,10 +1374,12 @@ public class TestBinaryDocValuesUpdates 
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("f1");
     
-    Document doc = new Document();
-    doc.add(new StringField("id", "doc", Store.NO));
-    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
+    Document2 doc = writer.newDocument();
+    doc.addAtom("id", "doc");
+    doc.addBinary("f1", toBytes(1L));
     writer.addDocument(doc);
     // update w/ multiple nonexisting terms in same field
     writer.updateBinaryDocValue(new Term("c", "foo"), "f1", toBytes(2L));
@@ -1308,8 +1405,10 @@ public class TestBinaryDocValuesUpdates 
     conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush
     conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     IndexWriter writer = new IndexWriter(dir, conf);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("val");
     for (int i = 0; i < 100; i++) {
-      writer.addDocument(doc(i));
+      writer.addDocument(doc(writer, i));
     }
     writer.commit();
     writer.close();
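
[Editor's note] The hunks above repeat one migration pattern: sorting is disabled up front on each binary doc-values field via FieldTypes, documents come from writer.newDocument(), and in-place doc-values updates become a Document2 that carries only the changed fields. Below is a minimal sketch of that pattern, using only the branch API visible in this diff; the helper name and field names are illustrative, not part of the commit:

    import org.apache.lucene.document.Document2;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.util.BytesRef;

    // Illustrative helper: index one doc with a binary doc-values field,
    // then update that field in place without touching the rest of the doc.
    static void indexThenUpdate(IndexWriter writer, BytesRef initial, BytesRef updated) throws Exception {
      FieldTypes fieldTypes = writer.getFieldTypes();
      fieldTypes.disableSorting("f");        // binary DV field; no sorted ordinals needed

      Document2 doc = writer.newDocument();
      doc.addUniqueAtom("id", "doc0");       // un-analyzed, unique key term
      doc.addBinary("f", initial);           // binary doc values
      writer.addDocument(doc);

      Document2 update = writer.newDocument();
      update.disableExistsField();           // per the hunks above, updates carry only DV fields
      update.addBinary("f", updated);
      writer.updateDocValues(new Term("id", "doc0"), update);
    }
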

Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryTerms.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryTerms.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestBinaryTerms.java Mon Nov 17 00:43:44 2014
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
@@ -44,11 +45,9 @@ public class TestBinaryTerms extends Luc
       bytes.bytes[0] = (byte) i;
       bytes.bytes[1] = (byte) (255 - i);
       bytes.length = 2;
-      Document doc = new Document();
-      FieldType customType = new FieldType();
-      customType.setStored(true);
-      doc.add(new Field("id", "" + i, customType));
-      doc.add(new TextField("bytes", tokenStream));
+      Document2 doc = iw.newDocument();
+      doc.addStored("id", "" + i);
+      doc.addLargeText("bytes", tokenStream);
       iw.addDocument(doc);
     }
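
[Editor's note] The same migration appears here in miniature: a stored-only field that previously needed a hand-built FieldType becomes doc.addStored, and the analyzed field becomes doc.addLargeText, which in this commit accepts either a String or a TokenStream. A hedged sketch, with the helper name illustrative:

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.document.Document2;
    import org.apache.lucene.index.IndexWriter;

    // Illustrative helper mirroring the hunk above: one stored-only field
    // (per the FieldType it replaces) and one analyzed large-text field
    // fed by a caller-supplied TokenStream.
    static void addBinaryTermDoc(IndexWriter iw, int i, TokenStream tokenStream) throws Exception {
      Document2 doc = iw.newDocument();
      doc.addStored("id", "" + i);             // stored, not indexed
      doc.addLargeText("bytes", tokenStream);  // analyzed; a String overload is used elsewhere in this commit
      iw.addDocument(doc);
    }
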
     

Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java Mon Nov 17 00:43:44 2014
@@ -17,23 +17,25 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.IOException;
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.PrintStream;
-import java.util.List;
 import java.util.ArrayList;
+import java.util.List;
 
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.Token;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
 
 public class TestCheckIndex extends LuceneTestCase {
 
@@ -41,13 +43,15 @@ public class TestCheckIndex extends Luce
     Directory dir = newDirectory();
     IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
                                                  .setMaxBufferedDocs(2));
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableExistsFilters();
+    fieldTypes.enableTermVectors("field");
+    fieldTypes.enableTermVectorPositions("field");
+    fieldTypes.enableTermVectorOffsets("field");
+
     for(int i=0;i<19;i++) {
-      Document doc = new Document();
-      FieldType customType = new FieldType(TextField.TYPE_STORED);
-      customType.setStoreTermVectors(true);
-      customType.setStoreTermVectorPositions(true);
-      customType.setStoreTermVectorOffsets(true);
-      doc.add(newField("field", "aaa"+i, customType));
+      Document2 doc = writer.newDocument();
+      doc.addLargeText("field", "aaa"+i);
       writer.addDocument(doc);
     }
     writer.forceMerge(1);
@@ -104,15 +108,15 @@ public class TestCheckIndex extends Luce
   public void testBogusTermVectors() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
-    Document doc = new Document();
-    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setStoreTermVectors(true);
-    ft.setStoreTermVectorOffsets(true);
-    Field field = new Field("foo", "", ft);
-    field.setTokenStream(new CannedTokenStream(
+    FieldTypes fieldTypes = iw.getFieldTypes();
+    fieldTypes.enableTermVectors("foo");
+    fieldTypes.enableTermVectorOffsets("foo");
+    fieldTypes.disableHighlighting("foo");
+
+    Document2 doc = iw.newDocument();
+    doc.addLargeText("foo", new CannedTokenStream(
         new Token("bar", 5, 10), new Token("bar", 1, 4)
     ));
-    doc.add(field);
     iw.addDocument(doc);
     iw.close();
     dir.close(); // checkindex
@@ -121,7 +125,7 @@ public class TestCheckIndex extends Luce
   public void testObtainsLock() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
-    iw.addDocument(new Document());
+    iw.addDocument(iw.newDocument());
     iw.commit();
     
     // keep IW open...
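
[Editor's note] Term-vector options move from per-Field FieldType flags to writer-level FieldTypes calls; disableHighlighting is additionally needed in the bogus-offsets test, presumably because the canned backwards offsets would otherwise be rejected for highlighting. A small sketch of the configuration side, using only calls from these hunks (the method name is illustrative):

    import org.apache.lucene.analysis.CannedTokenStream;
    import org.apache.lucene.analysis.Token;
    import org.apache.lucene.document.Document2;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.index.IndexWriter;

    // Illustrative setup: full term vectors for "field", fed by a canned
    // token stream whose offsets go backwards (hence no highlighting).
    static void configureAndIndex(IndexWriter iw) throws Exception {
      FieldTypes fieldTypes = iw.getFieldTypes();
      fieldTypes.enableTermVectors("field");
      fieldTypes.enableTermVectorPositions("field");
      fieldTypes.enableTermVectorOffsets("field");
      fieldTypes.disableHighlighting("field");   // offsets below are out of order

      Document2 doc = iw.newDocument();
      doc.addLargeText("field", new CannedTokenStream(
          new Token("bar", 5, 10), new Token("bar", 1, 4)));
      iw.addDocument(doc);
    }
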

Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java Mon Nov 17 00:43:44 2014
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.store.BaseDirectoryWrapper;
@@ -33,8 +34,8 @@ public class TestCodecHoldsOpenFiles ext
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     int numDocs = atLeast(100);
     for(int i=0;i<numDocs;i++) {
-      Document doc = new Document();
-      doc.add(newField("foo", "bar", TextField.TYPE_NOT_STORED));
+      Document2 doc = w.newDocument();
+      doc.addLargeText("foo", "bar");
       w.addDocument(doc);
     }
 

Modified: lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1640053&r1=1640052&r2=1640053&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/lucene6005/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Mon Nov 17 00:43:44 2014
@@ -24,8 +24,10 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -84,9 +86,6 @@ public class TestConcurrentMergeSchedule
 
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
                                                       .setMaxBufferedDocs(2));
-    Document doc = new Document();
-    Field idField = newStringField("id", "", Field.Store.YES);
-    doc.add(idField);
     int extraCount = 0;
 
     for(int i=0;i<10;i++) {
@@ -95,10 +94,13 @@ public class TestConcurrentMergeSchedule
       }
 
       for(int j=0;j<20;j++) {
-        idField.setStringValue(Integer.toString(i*20+j));
+        Document2 doc = writer.newDocument();
+        doc.addInt("id", i*20+j);
         writer.addDocument(doc);
       }
 
+      Document2 doc = writer.newDocument();
+      doc.addInt("id", i*20+19);
       // must cycle here because sometimes the merge flushes
       // the doc we just added and so there's nothing to
       // flush, and we don't hit the exception
@@ -142,15 +144,14 @@ public class TestConcurrentMergeSchedule
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
                                                       .setMergePolicy(mp));
 
-    Document doc = new Document();
-    Field idField = newStringField("id", "", Field.Store.YES);
-    doc.add(idField);
+    FieldTypes fieldTypes = writer.getFieldTypes();
     for(int i=0;i<10;i++) {
       if (VERBOSE) {
         System.out.println("\nTEST: cycle");
       }
       for(int j=0;j<100;j++) {
-        idField.setStringValue(Integer.toString(i*100+j));
+        Document2 doc = writer.newDocument();
+        doc.addUniqueInt("id", i*100+j);
         writer.addDocument(doc);
       }
 
@@ -159,7 +160,7 @@ public class TestConcurrentMergeSchedule
         if (VERBOSE) {
           System.out.println("TEST: del " + delID);
         }
-        writer.deleteDocuments(new Term("id", ""+delID));
+        writer.deleteDocuments(fieldTypes.newIntTerm("id", delID));
         delID += 10;
       }
 
@@ -189,8 +190,8 @@ public class TestConcurrentMergeSchedule
       }
 
       for(int j=0;j<21;j++) {
-        Document doc = new Document();
-        doc.add(newTextField("content", "a b c", Field.Store.NO));
+        Document2 doc = writer.newDocument();
+        doc.addLargeText("content", "a b c");
         writer.addDocument(doc);
       }
         
@@ -212,9 +213,6 @@ public class TestConcurrentMergeSchedule
 
   public void testNoWaitClose() throws IOException {
     Directory directory = newDirectory();
-    Document doc = new Document();
-    Field idField = newStringField("id", "", Field.Store.YES);
-    doc.add(idField);
 
     IndexWriter writer = new IndexWriter(
         directory,
@@ -224,24 +222,26 @@ public class TestConcurrentMergeSchedule
             setMergePolicy(newLogMergePolicy(100)).
             setCommitOnClose(false)
     );
+    FieldTypes fieldTypes = writer.getFieldTypes();
 
     for(int iter=0;iter<10;iter++) {
 
       for(int j=0;j<201;j++) {
-        idField.setStringValue(Integer.toString(iter*201+j));
+        Document2 doc = writer.newDocument();
+        doc.addUniqueInt("id", iter*201+j);
         writer.addDocument(doc);
       }
 
       int delID = iter*201;
       for(int j=0;j<20;j++) {
-        writer.deleteDocuments(new Term("id", Integer.toString(delID)));
+        writer.deleteDocuments(fieldTypes.newIntTerm("id", delID));
         delID += 5;
       }
 
       // Force a bunch of merge threads to kick off so we
       // stress out aborting them on close:
       ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
-      writer.addDocument(doc);
+      writer.addDocument(writer.newDocument());
       writer.commit();
 
       try {
@@ -327,8 +327,8 @@ public class TestConcurrentMergeSchedule
     tmp.setSegmentsPerTier(2);
 
     IndexWriter w = new IndexWriter(dir, iwc);
-    Document doc = new Document();
-    doc.add(newField("field", "field", TextField.TYPE_NOT_STORED));
+    Document2 doc = w.newDocument();
+    doc.addLargeText("field", "field");
     while(enoughMergesWaiting.getCount() != 0 && !failed.get()) {
       for(int i=0;i<10;i++) {
         w.addDocument(doc);
@@ -374,13 +374,15 @@ public class TestConcurrentMergeSchedule
       iwc.setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat()));
     }
     IndexWriter w = new IndexWriter(d, iwc);
+    FieldTypes fieldTypes = w.getFieldTypes();
+
     for(int i=0;i<1000;i++) {
-      Document doc = new Document();
-      doc.add(new StringField("id", ""+i, Field.Store.NO));
+      Document2 doc = w.newDocument();
+      doc.addUniqueInt("id", i);
       w.addDocument(doc);
 
       if (random().nextBoolean()) {
-        w.deleteDocuments(new Term("id", ""+random().nextInt(i+1)));
+        w.deleteDocuments(fieldTypes.newIntTerm("id", random().nextInt(i+1)));
       }
     }
     atLeastOneMerge.await();
@@ -431,7 +433,7 @@ public class TestConcurrentMergeSchedule
     IndexWriter w = new IndexWriter(d, iwc);
     // Makes 100 segments
     for(int i=0;i<200;i++) {
-      w.addDocument(new Document());
+      w.addDocument(w.newDocument());
     }
 
     // No merges should have run so far, because TMP has high segmentsPerTier:
@@ -445,7 +447,7 @@ public class TestConcurrentMergeSchedule
 
     // Makes another 100 segments
     for(int i=0;i<200;i++) {
-      w.addDocument(new Document());
+      w.addDocument(w.newDocument());
     }
 
     ((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).setMaxMergesAndThreads(1, 1);
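
[Editor's note] The notable change in this file is that string ids become typed ints: addUniqueInt declares the key field, and deletions go through fieldTypes.newIntTerm instead of a hand-formatted Term. A sketch under the same assumptions as above (helper name illustrative):

    import org.apache.lucene.document.Document2;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.index.IndexWriter;

    // Illustrative helper: add numDocs docs keyed by a unique int id, then
    // delete every tenth one via a typed term built by FieldTypes.
    static void addThenDelete(IndexWriter writer, int numDocs) throws Exception {
      FieldTypes fieldTypes = writer.getFieldTypes();
      for (int i = 0; i < numDocs; i++) {
        Document2 doc = writer.newDocument();
        doc.addUniqueInt("id", i);            // typed, unique key field
        writer.addDocument(doc);
      }
      for (int delID = 0; delID < numDocs; delID += 10) {
        writer.deleteDocuments(fieldTypes.newIntTerm("id", delID));
      }
    }
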


