lucene-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mikemcc...@apache.org
Subject svn commit: r1596817 [1/5] - in /lucene/dev/branches/lucene5675: ./ dev-tools/ dev-tools/idea/lucene/spatial/ dev-tools/idea/solr/contrib/analysis-extras/ dev-tools/scripts/ lucene/ lucene/codecs/ lucene/codecs/src/java/org/apache/lucene/codecs/memory/...
Date Thu, 22 May 2014 11:38:49 GMT
Author: mikemccand
Date: Thu May 22 11:38:47 2014
New Revision: 1596817

URL: http://svn.apache.org/r1596817
Log:
LUCENE-5675: merge trunk

Added:
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/BooleanTopLevelScorers.java
      - copied unchanged from r1596815, lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/search/BooleanTopLevelScorers.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/FilterScorer.java
      - copied unchanged from r1596815, lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/search/FilterScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/store/OutputStreamIndexOutput.java
      - copied unchanged from r1596815, lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/OutputStreamIndexOutput.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java
      - copied unchanged from r1596815, lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java
    lucene/dev/branches/lucene5675/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java
      - copied unchanged from r1596815, lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java
      - copied unchanged from r1596815, lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
      - copied unchanged from r1596815, lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
Removed:
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/BooleanScorer2.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/store/BufferedIndexOutput.java
Modified:
    lucene/dev/branches/lucene5675/   (props changed)
    lucene/dev/branches/lucene5675/dev-tools/   (props changed)
    lucene/dev/branches/lucene5675/dev-tools/idea/lucene/spatial/spatial.iml
    lucene/dev/branches/lucene5675/dev-tools/idea/solr/contrib/analysis-extras/analysis-extras.iml
    lucene/dev/branches/lucene5675/dev-tools/scripts/smokeTestRelease.py
    lucene/dev/branches/lucene5675/lucene/   (props changed)
    lucene/dev/branches/lucene5675/lucene/CHANGES.txt   (contents, props changed)
    lucene/dev/branches/lucene5675/lucene/codecs/   (props changed)
    lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermOutputs.java
    lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java
    lucene/dev/branches/lucene5675/lucene/common-build.xml   (contents, props changed)
    lucene/dev/branches/lucene5675/lucene/core/   (props changed)
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene46/Lucene46FieldInfosReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/TrackingIndexWriter.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/DisjunctionScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/MinShouldMatchSumScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/QueryRescorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/ReqExclScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/ReqOptSumScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/search/ScoreCachingWrappingScorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/ByteSequenceOutputs.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/BytesStore.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/CharSequenceOutputs.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/FST.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/ForwardBytesReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/IntSequenceOutputs.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/Outputs.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/PairOutputs.java
    lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/util/fst/ReverseBytesReader.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java
    lucene/dev/branches/lucene5675/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java
    lucene/dev/branches/lucene5675/lucene/memory/   (props changed)
    lucene/dev/branches/lucene5675/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
    lucene/dev/branches/lucene5675/lucene/misc/   (props changed)
    lucene/dev/branches/lucene5675/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
    lucene/dev/branches/lucene5675/lucene/misc/src/java/org/apache/lucene/uninverting/DocTermOrds.java
    lucene/dev/branches/lucene5675/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
    lucene/dev/branches/lucene5675/lucene/misc/src/java/org/apache/lucene/util/fst/ListOfOutputs.java
    lucene/dev/branches/lucene5675/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
    lucene/dev/branches/lucene5675/lucene/suggest/   (props changed)
    lucene/dev/branches/lucene5675/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
    lucene/dev/branches/lucene5675/lucene/test-framework/   (props changed)
    lucene/dev/branches/lucene5675/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
    lucene/dev/branches/lucene5675/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
    lucene/dev/branches/lucene5675/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
    lucene/dev/branches/lucene5675/lucene/tools/   (props changed)
    lucene/dev/branches/lucene5675/lucene/tools/junit4/tests.policy
    lucene/dev/branches/lucene5675/solr/   (props changed)
    lucene/dev/branches/lucene5675/solr/CHANGES.txt   (contents, props changed)
    lucene/dev/branches/lucene5675/solr/contrib/   (props changed)
    lucene/dev/branches/lucene5675/solr/contrib/analysis-extras/src/test/org/apache/solr/analysis/TestFoldingMultitermExtrasQuery.java
    lucene/dev/branches/lucene5675/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationFieldOptions.java
    lucene/dev/branches/lucene5675/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/AbstractClusteringTestCase.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/build.xml
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/PropertyWriter.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestFieldReader.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestNonWritablePersistFile.java
    lucene/dev/branches/lucene5675/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java
    lucene/dev/branches/lucene5675/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
    lucene/dev/branches/lucene5675/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
    lucene/dev/branches/lucene5675/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
    lucene/dev/branches/lucene5675/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java
    lucene/dev/branches/lucene5675/solr/core/   (props changed)
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/DistributedMap.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/Overseer.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/cloud/ZkController.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/core/SolrCore.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/handler/component/ShardHandler.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/search/Insanity.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/spelling/suggest/fst/AnalyzingInfixLookupFactory.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/spelling/suggest/fst/BlendedInfixLookupFactory.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/store/hdfs/HdfsFileWriter.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/update/StreamingSolrServers.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/update/UpdateLog.java
    lucene/dev/branches/lucene5675/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionProcessorTest.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/core/TestConfigSets.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/request/TestFaceting.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/rest/TestManagedResourceStorage.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/spelling/suggest/TestAnalyzeInfixSuggestions.java
    lucene/dev/branches/lucene5675/solr/core/src/test/org/apache/solr/spelling/suggest/TestBlendedInfixSuggestions.java
    lucene/dev/branches/lucene5675/solr/solrj/   (props changed)
    lucene/dev/branches/lucene5675/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java
    lucene/dev/branches/lucene5675/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java
    lucene/dev/branches/lucene5675/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
    lucene/dev/branches/lucene5675/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
    lucene/dev/branches/lucene5675/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrServer.java
    lucene/dev/branches/lucene5675/solr/solrj/src/test/org/apache/solr/client/solrj/request/SolrPingTest.java
    lucene/dev/branches/lucene5675/solr/test-framework/   (props changed)
    lucene/dev/branches/lucene5675/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
    lucene/dev/branches/lucene5675/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java

Modified: lucene/dev/branches/lucene5675/dev-tools/idea/lucene/spatial/spatial.iml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/dev-tools/idea/lucene/spatial/spatial.iml?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/dev-tools/idea/lucene/spatial/spatial.iml (original)
+++ lucene/dev/branches/lucene5675/dev-tools/idea/lucene/spatial/spatial.iml Thu May 22 11:38:47 2014
@@ -25,6 +25,7 @@
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" module-name="lucene-core" />
     <orderEntry type="module" module-name="queries" />
+    <orderEntry type="module" module-name="misc" />
     <orderEntry type="module" module-name="analysis-common" scope="TEST"/>
   </component>
 </module>

Modified: lucene/dev/branches/lucene5675/dev-tools/idea/solr/contrib/analysis-extras/analysis-extras.iml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/dev-tools/idea/solr/contrib/analysis-extras/analysis-extras.iml?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/dev-tools/idea/solr/contrib/analysis-extras/analysis-extras.iml (original)
+++ lucene/dev/branches/lucene5675/dev-tools/idea/solr/contrib/analysis-extras/analysis-extras.iml Thu May 22 11:38:47 2014
@@ -45,5 +45,6 @@
     <orderEntry type="module" module-name="stempel" />
     <orderEntry type="module" module-name="analysis-common" />
     <orderEntry type="module" module-name="lucene-core" />
+    <orderEntry type="module" module-name="misc" />
   </component>
 </module>

Modified: lucene/dev/branches/lucene5675/dev-tools/scripts/smokeTestRelease.py
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/dev-tools/scripts/smokeTestRelease.py?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/dev-tools/scripts/smokeTestRelease.py (original)
+++ lucene/dev/branches/lucene5675/dev-tools/scripts/smokeTestRelease.py Thu May 22 11:38:47 2014
@@ -468,12 +468,22 @@ def testChangesText(dir, version, projec
       checkChangesContent(open(fullPath, encoding='UTF-8').read(), version, fullPath, project, False)
 
 reChangesSectionHREF = re.compile('<a id="(.*?)".*?>(.*?)</a>', re.IGNORECASE)
-
+reUnderbarNotDashHTML = re.compile(r'<li>(\s*(LUCENE|SOLR)_\d\d\d\d+)')
+reUnderbarNotDashTXT = re.compile(r'\s+((LUCENE|SOLR)_\d\d\d\d+)', re.MULTILINE)
 def checkChangesContent(s, version, name, project, isHTML):
 
   if isHTML and s.find('Release %s' % version) == -1:
     raise RuntimeError('did not see "Release %s" in %s' % (version, name))
 
+  if isHTML:
+    r = reUnderbarNotDashHTML
+  else:
+    r = reUnderbarNotDashTXT
+
+  m = r.search(s)
+  if m is not None:
+    raise RuntimeError('incorrect issue (_ instead of -) in %s: %s' % (name, m.group(1)))
+    
   if s.lower().find('not yet released') != -1:
     raise RuntimeError('saw "not yet released" in %s' % name)
 

Modified: lucene/dev/branches/lucene5675/lucene/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/CHANGES.txt?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/CHANGES.txt (original)
+++ lucene/dev/branches/lucene5675/lucene/CHANGES.txt Thu May 22 11:38:47 2014
@@ -80,6 +80,12 @@ API Changes
 * LUCENE-4371: Removed IndexInputSlicer and Directory.createSlicer() and replaced
   with IndexInput.slice(). (Robert Muir)
 
+* LUCENE-5678: IndexOutput no longer allows seeking, so it is no longer required
+  to use RandomAccessFile to write Indexes. Lucene now uses standard FileOutputStream
+  wrapped with OutputStreamIndexOutput to write index data. BufferedIndexOutput was
+  removed, because buffering and checksumming are provided by FilterOutputStreams,
+  provided by the JDK.  (Uwe Schindler, Mike McCandless)
+
 Documentation
 
 * LUCENE-5392: Add/improve analysis package documentation to reflect
@@ -148,6 +154,10 @@ API Changes
 * LUCENE-5640: The Token class was deprecated. Since Lucene 2.9, TokenStreams
   are using Attributes, Token is no longer used.  (Uwe Schindler, Robert Muir)
 
+* LUCENE-5679: Consolidated IndexWriter.deleteDocuments(Term) and 
+  IndexWriter.deleteDocuments(Query) with their varargs counterparts.
+  (Shai Erera)
+
 Optimizations
 
 * LUCENE-5603: hunspell stemmer more efficiently strips prefixes
@@ -172,22 +182,64 @@ Optimizations
   to 8 (for int/float) and 16 (for long/double), for faster indexing
   time and smaller indices. (Robert Muir, Uwe Schindler, Mike McCandless)
 
+* LUCENE-5670: Add skip/FinalOutput to FST Outputs.  (Christian
+  Ziech via Mike McCandless).
+
+* LUCENE-4236: Optimize BooleanQuery's in-order scoring. This speeds up
+  some types of boolean queries.  (Robert Muir)
+
 Bug fixes
 
-* LUCENE-5600: HttpClientBase did not properly consume a connection if a server
-  error occurred. (Christoph Kaser via Shai Erera)
+* LUCENE-5673: MMapDirectory: Work around a "bug" in the JDK that throws
+  a confusing OutOfMemoryError wrapped inside IOException if the FileChannel
+  mapping failed because of lack of virtual address space. The IOException is
+  rethrown with more useful information about the problem, omitting the
+  incorrect OutOfMemoryError.  (Robert Muir, Uwe Schindler)
 
-* LUCENE-5559: Add additional argument validation for CapitalizationFilter
-  and CodepointCountFilter. (Ahmet Arslan via Robert Muir)
+* LUCENE-5682: NPE in QueryRescorer when Scorer is null
+  (Joel Bernstein, Mike McCandless)
+
+* LUCENE-5691: DocTermOrds lookupTerm(BytesRef) would return incorrect results
+  if the underlying TermsEnum supports ord() and the insertion point would
+  be at the end. (Robert Muir)
+
+* LUCENE-5618, LUCENE-5636: SegmentReader referenced unneeded files following 
+  doc-values updates. Now doc-values field updates are written in separate file
+  per field. (Shai Erera, Robert Muir)
+
+Test Framework
+
+* LUCENE-5622: Fail tests if they print over the given limit of bytes to 
+  System.out or System.err. (Robert Muir, Dawid Weiss)
+  
+* LUCENE-5619: Added backwards compatibility tests to ensure we can update existing
+  indexes with doc-values updates. (Shai Erera, Robert Muir)
+
+======================= Lucene 4.8.1 =======================
+
+Bug fixes
 
 * LUCENE-5639: Fix PositionLengthAttribute implementation in Token class.
   (Uwe Schindler, Robert Muir)
 
+* LUCENE-5635: IndexWriter didn't properly handle IOException on TokenStream.reset(),
+  which could leave the analyzer in an inconsistent state.  (Robert Muir)
+
+* LUCENE-5599: HttpReplicator did not properly delegate bulk read() to wrapped
+  InputStream. (Christoph Kaser via Shai Erera)
+  
+* LUCENE-5600: HttpClientBase did not properly consume a connection if a server
+  error occurred. (Christoph Kaser via Shai Erera)
+
 * LUCENE-5628: Change getFiniteStrings to iterative not recursive
   implementation, so that building suggesters on a long suggestion
   doesn't risk overflowing the stack; previously it consumed one Java
-  stack frame per character in the expanded suggestion (Robert Muir,
-  Simon Willnauer, Mike McCandless).
+  stack frame per character in the expanded suggestion.  If you are building
+  a suggester this is a nasty trap. (Robert Muir, Simon Willnauer,
+  Mike McCandless).
+
+* LUCENE-5559: Add additional argument validation for CapitalizationFilter
+  and CodepointCountFilter. (Ahmet Arslan via Robert Muir)
 
 * LUCENE-5641: SimpleRateLimiter would silently rate limit at 8 MB/sec
   even if you asked for higher rates.  (Mike McCandless)
@@ -198,24 +250,32 @@ Bug fixes
   many segments are flushed on a full-flush (e.g. to obtain a
   near-real-time reader).  (Simon Willnauer, Mike McCandless)
 
+* LUCENE-5653: JoinUtil with ScoreMode.Avg on a multi-valued field
+  with more than 256 values would throw exception.
+  (Mikhail Khludnev via Robert Muir)
+
+* LUCENE-5654: Fix various close() methods that could suppress 
+  throwables such as OutOfMemoryError, instead returning scary messages
+  that look like index corruption.  (Mike McCandless, Robert Muir)
+
+* LUCENE-5656: Fix rare fd leak in SegmentReader when multiple docvalues
+  fields have been updated with IndexWriter.updateXXXDocValue and one
+  hits exception. (Shai Erera, Robert Muir)
+
 * LUCENE-5660: AnalyzingSuggester.build will now throw IllegalArgumentException if
   you give it a longer suggestion than it can handle (Robert Muir, Mike McCandless)
 
-* LUCENE-5668: Fix off-by-one in TieredMergePolicy (Mike McCandless)
+* LUCENE-5662: Add missing checks to Field to prevent IndexWriter.abort
+  if a stored value is null. (Robert Muir)
 
-* LUCENE-5673: MMapDirectory: Work around a "bug" in the JDK that throws
-  a confusing OutOfMemoryError wrapped inside IOException if the FileChannel
-  mapping failed because of lack of virtual address space. The IOException is
-  rethrown with more useful information about the problem, omitting the
-  incorrect OutOfMemoryError.  (Robert Muir, Uwe Schindler)
+* LUCENE-5668: Fix off-by-one in TieredMergePolicy (Mike McCandless)
 
-Test Framework
+* LUCENE-5671: Upgrade ICU version to fix an ICU concurrency problem that
+  could cause exceptions when indexing. (feedly team, Robert Muir)
 
-* LUCENE-5622: Fail tests if they print over the given limit of bytes to 
-  System.out or System.err. (Robert Muir, Dawid Weiss)
-  
-* LUCENE-5619: Added backwards compatibility tests to ensure we can update existing
-  indexes with doc-values updates. (Shai Erera, Robert Muir)
+* LUCENE-5650: Enforce read-only access to any path outside the temporary
+  folder via security manager, and make test temp dirs absolute.
+  (Ryan Ernst, Dawid Weiss)
 
 ======================= Lucene 4.8.0 =======================
 

Modified: lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermOutputs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermOutputs.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermOutputs.java (original)
+++ lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermOutputs.java Thu May 22 11:38:47 2014
@@ -299,6 +299,33 @@ class FSTTermOutputs extends Outputs<FST
     }
     return new TermData(longs, bytes, docFreq, totalTermFreq);
   }
+  
+
+  @Override
+  public void skipOutput(DataInput in) throws IOException {
+    int bits = in.readByte() & 0xff;
+    int bit0 = bits & 1;
+    int bit1 = bits & 2;
+    int bit2 = bits & 4;
+    int bytesSize = (bits >>> 3);
+    if (bit1 > 0 && bytesSize == 0) {  // determine extra length
+      bytesSize = in.readVInt();
+    }
+    if (bit0 > 0) {  // not all-zero case
+      for (int pos = 0; pos < longsSize; pos++) {
+        in.readVLong();
+      }
+    }
+    if (bit1 > 0) {  // bytes exists
+      in.skipBytes(bytesSize);
+    }
+    if (bit2 > 0) {  // stats exist
+      int code = in.readVInt();
+      if (hasPos && (code & 1) == 0) {
+        in.readVLong();
+      }
+    }
+  }
 
   @Override
   public TermData getNoOutput() {

Modified: lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java (original)
+++ lucene/dev/branches/lucene5675/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java Thu May 22 11:38:47 2014
@@ -125,8 +125,7 @@ public class SimpleTextFieldInfosReader 
         }
 
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(atts));
-        infos[i].setDocValuesGen(dvGen);
+          omitNorms, storePayloads, indexOptions, docValuesType, normsType, dvGen, Collections.unmodifiableMap(atts));
       }
 
       SimpleTextUtil.checkFooter(input);

Modified: lucene/dev/branches/lucene5675/lucene/common-build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/common-build.xml?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/common-build.xml (original)
+++ lucene/dev/branches/lucene5675/lucene/common-build.xml Thu May 22 11:38:47 2014
@@ -974,9 +974,9 @@
             <!-- TODO: create propertyset for test properties, so each project can have its own set -->
             <sysproperty key="tests.multiplier" value="@{tests.multiplier}"/>
             
-            <!-- Temporary directory in the cwd. -->
-            <sysproperty key="tempDir" value="." />
-            <sysproperty key="java.io.tmpdir" value="." />
+            <!-- Temporary directory a subdir of the cwd. -->
+            <sysproperty key="tempDir" value="./temp" />
+            <sysproperty key="java.io.tmpdir" value="./temp" />
 
             <!-- Restrict access to certain Java features and install security manager: -->
             <sysproperty key="junit4.tempDir" file="@{workDir}/temp" />
@@ -2202,7 +2202,7 @@ ${ant.project.name}.test.dependencies=${
 
   <!-- GROOVY scripting engine for ANT tasks -->
   <target name="resolve-groovy" unless="groovy.loaded" depends="ivy-availability-check,ivy-configure">
-    <ivy:cachepath organisation="org.codehaus.groovy" module="groovy-all" revision="2.3.0"
+    <ivy:cachepath organisation="org.codehaus.groovy" module="groovy-all" revision="2.3.1"
       inline="true" conf="default" type="jar" transitive="true" pathid="groovy.classpath"/>
     <taskdef name="groovy"
       classname="org.codehaus.groovy.ant.Groovy"

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosReader.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosReader.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40FieldInfosReader.java Thu May 22 11:38:47 2014
@@ -104,7 +104,7 @@ class Lucene40FieldInfosReader extends F
           attributes.put(LEGACY_NORM_TYPE_KEY, oldNormsType.name());
         }
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
-          omitNorms, storePayloads, indexOptions, oldValuesType.mapping, oldNormsType.mapping, Collections.unmodifiableMap(attributes));
+          omitNorms, storePayloads, indexOptions, oldValuesType.mapping, oldNormsType.mapping, -1, Collections.unmodifiableMap(attributes));
       }
 
       CodecUtil.checkEOF(input);

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java Thu May 22 11:38:47 2014
@@ -148,12 +148,10 @@ class Lucene42DocValuesProducer extends 
   private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
     int fieldNumber = meta.readVInt();
     while (fieldNumber != -1) {
-      // check should be: infos.fieldInfo(fieldNumber) != null, which incorporates negative check
-      // but docvalues updates are currently buggy here (loading extra stuff, etc): LUCENE-5616
-      if (fieldNumber < 0) {
+      if (infos.fieldInfo(fieldNumber) == null) {
         // trickier to validate more: because we re-use for norms, because we use multiple entries
         // for "composite" types like sortedset, etc.
-        throw new CorruptIndexException("Invalid field number: " + fieldNumber + ", input=" + meta);
+        throw new CorruptIndexException("Invalid field number: " + fieldNumber + " (resource=" + meta + ")");
       }
       int fieldType = meta.readByte();
       if (fieldType == NUMBER) {

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosReader.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosReader.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42FieldInfosReader.java Thu May 22 11:38:47 2014
@@ -89,7 +89,7 @@ final class Lucene42FieldInfosReader ext
         final DocValuesType normsType = getDocValuesType(input, (byte) ((val >>> 4) & 0x0F));
         final Map<String,String> attributes = input.readStringStringMap();
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(attributes));
+          omitNorms, storePayloads, indexOptions, docValuesType, normsType, -1, Collections.unmodifiableMap(attributes));
       }
 
       CodecUtil.checkEOF(input);

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java Thu May 22 11:38:47 2014
@@ -57,6 +57,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util.packed.BlockPackedReader;
 import org.apache.lucene.util.packed.MonotonicBlockPackedReader;
 import org.apache.lucene.util.packed.PackedInts;
@@ -72,13 +73,29 @@ public class Lucene45DocValuesProducer e
   private final IndexInput data;
   private final int maxDoc;
   private final int version;
+  
+  // We need this for pre-4.9 indexes which recorded multiple fields' DocValues
+  // updates under the same generation, and therefore the passed FieldInfos may
+  // not include all the fields that are encoded in this generation. In that
+  // case, we are more lenient about the fields we read and the passed-in
+  // FieldInfos.
+  @Deprecated
+  private final boolean lenientFieldInfoCheck;
 
   // memory-resident structures
   private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<>();
   private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<>();
   
   /** expert: instantiates a new reader */
+  @SuppressWarnings("deprecation")
   protected Lucene45DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
+    Version ver;
+    try {
+      ver = Version.parseLeniently(state.segmentInfo.getVersion());
+    } catch (IllegalArgumentException e) {
+      ver = null;
+    }
+    lenientFieldInfoCheck = ver == null || !ver.onOrAfter(Version.LUCENE_4_9);
     String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
     // read in the entries from the metadata file.
     ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
@@ -185,9 +202,7 @@ public class Lucene45DocValuesProducer e
   private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
     int fieldNumber = meta.readVInt();
     while (fieldNumber != -1) {
-      // check should be: infos.fieldInfo(fieldNumber) != null, which incorporates negative check
-      // but docvalues updates are currently buggy here (loading extra stuff, etc): LUCENE-5616
-      if (fieldNumber < 0) {
+      if ((lenientFieldInfoCheck && fieldNumber < 0) || (!lenientFieldInfoCheck && infos.fieldInfo(fieldNumber) == null)) {
         // trickier to validate more: because we re-use for norms, because we use multiple entries
         // for "composite" types like sortedset, etc.
         throw new CorruptIndexException("Invalid field number: " + fieldNumber + " (resource=" + meta + ")");

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene46/Lucene46FieldInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene46/Lucene46FieldInfosReader.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene46/Lucene46FieldInfosReader.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/codecs/lucene46/Lucene46FieldInfosReader.java Thu May 22 11:38:47 2014
@@ -89,8 +89,7 @@ final class Lucene46FieldInfosReader ext
         final long dvGen = input.readLong();
         final Map<String,String> attributes = input.readStringStringMap();
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(attributes));
-        infos[i].setDocValuesGen(dvGen);
+          omitNorms, storePayloads, indexOptions, docValuesType, normsType, dvGen, Collections.unmodifiableMap(attributes));
       }
       
       if (codecVersion >= Lucene46FieldInfosFormat.FORMAT_CHECKSUM) {

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java Thu May 22 11:38:47 2014
@@ -416,9 +416,9 @@ class DocumentsWriterPerThread {
       pendingUpdates.terms.clear();
       segmentInfo.setFiles(new HashSet<>(directory.getCreatedFiles()));
 
-      final SegmentCommitInfo segmentInfoPerCommit = new SegmentCommitInfo(segmentInfo, 0, -1L, -1L);
+      final SegmentCommitInfo segmentInfoPerCommit = new SegmentCommitInfo(segmentInfo, 0, -1L, -1L, -1L);
       if (infoStream.isEnabled("DWPT")) {
-        infoStream.message("DWPT", "new segment has " + (flushState.liveDocs == null ? 0 : (flushState.segmentInfo.getDocCount() - flushState.delCountOnFlush)) + " deleted docs");
+        infoStream.message("DWPT", "new segment has " + (flushState.liveDocs == null ? 0 : flushState.delCountOnFlush) + " deleted docs");
         infoStream.message("DWPT", "new segment has " +
                            (flushState.fieldInfos.hasVectors() ? "vectors" : "no vectors") + "; " +
                            (flushState.fieldInfos.hasNorms() ? "norms" : "no norms") + "; " + 

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java Thu May 22 11:38:47 2014
@@ -47,7 +47,7 @@ public final class FieldInfo {
 
   private Map<String,String> attributes;
 
-  private long dvGen = -1; // the DocValues generation of this field
+  private long dvGen;
   
   /**
    * Controls how much information is stored in the postings lists.
@@ -121,7 +121,7 @@ public final class FieldInfo {
    */
   public FieldInfo(String name, boolean indexed, int number, boolean storeTermVector, boolean omitNorms, 
       boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues, DocValuesType normsType, 
-      Map<String,String> attributes) {
+      long dvGen, Map<String,String> attributes) {
     this.name = name;
     this.indexed = indexed;
     this.number = number;
@@ -139,6 +139,7 @@ public final class FieldInfo {
       this.indexOptions = null;
       this.normType = null;
     }
+    this.dvGen = dvGen;
     this.attributes = attributes;
     assert checkConsistency();
   }
@@ -158,6 +159,10 @@ public final class FieldInfo {
       // Cannot store payloads unless positions are indexed:
       assert indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads;
     }
+    
+    if (dvGen != -1) {
+      assert docValueType != null;
+    }
 
     return true;
   }
@@ -221,8 +226,9 @@ public final class FieldInfo {
   }
   
   /** Sets the docValues generation of this field. */
-  public void setDocValuesGen(long dvGen) {
+  void setDocValuesGen(long dvGen) {
     this.dvGen = dvGen;
+    assert checkConsistency();
   }
   
   /**

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java Thu May 22 11:38:47 2014
@@ -302,7 +302,7 @@ public class FieldInfos implements Itera
         // before then we'll get the same name and number,
         // else we'll allocate a new one:
         final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, docValues);
-        fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, null);
+        fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, -1, null);
         assert !byName.containsKey(fi.name);
         assert globalFieldNumbers.containsConsistent(Integer.valueOf(fi.number), fi.name, fi.getDocValuesType());
         byName.put(fi.name, fi);

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java Thu May 22 11:38:47 2014
@@ -76,8 +76,8 @@ import org.apache.lucene.util.Version;
   and otherwise open the existing index.</p>
 
   <p>In either case, documents are added with {@link #addDocument(IndexDocument)
-  addDocument} and removed with {@link #deleteDocuments(Term)} or {@link
-  #deleteDocuments(Query)}. A document can be updated with {@link
+  addDocument} and removed with {@link #deleteDocuments(Term...)} or {@link
+  #deleteDocuments(Query...)}. A document can be updated with {@link
   #updateDocument(Term, IndexDocument) updateDocument} (which just deletes
   and then adds the entire document). When finished adding, deleting 
   and updating documents, {@link #close() close} should be called.</p>
@@ -1216,28 +1216,6 @@ public class IndexWriter implements Clos
     }
   }
 
-  /**
-   * Deletes the document(s) containing <code>term</code>.
-   *
-   * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
-   * you should immediately close the writer.  See <a
-   * href="#OOME">above</a> for details.</p>
-   *
-   * @param term the term to identify the documents to be deleted
-   * @throws CorruptIndexException if the index is corrupt
-   * @throws IOException if there is a low-level IO error
-   */
-  public void deleteDocuments(Term term) throws IOException {
-    ensureOpen();
-    try {
-      if (docWriter.deleteTerms(term)) {
-        processEvents(true, false);
-      }
-    } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "deleteDocuments(Term)");
-    }
-  }
-
   /** Expert: attempts to delete by document ID, as long as
    *  the provided reader is a near-real-time reader (from {@link
    *  DirectoryReader#open(IndexWriter,boolean)}).  If the
@@ -1250,8 +1228,7 @@ public class IndexWriter implements Clos
    *  <b>NOTE</b>: this method can only delete documents
    *  visible to the currently open NRT reader.  If you need
    *  to delete documents indexed after opening the NRT
-   *  reader you must use the other deleteDocument methods
-   *  (e.g., {@link #deleteDocuments(Term)}). */
+   *  reader you must use {@link #deleteDocuments(Term...)}). */
   public synchronized boolean tryDeleteDocument(IndexReader readerIn, int docID) throws IOException {
 
     final AtomicReader reader;
@@ -1340,28 +1317,6 @@ public class IndexWriter implements Clos
   }
 
   /**
-   * Deletes the document(s) matching the provided query.
-   *
-   * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
-   * you should immediately close the writer.  See <a
-   * href="#OOME">above</a> for details.</p>
-   *
-   * @param query the query to identify the documents to be deleted
-   * @throws CorruptIndexException if the index is corrupt
-   * @throws IOException if there is a low-level IO error
-   */
-  public void deleteDocuments(Query query) throws IOException {
-    ensureOpen();
-    try {
-      if (docWriter.deleteQueries(query)) {
-        processEvents(true, false);
-      }
-    } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "deleteDocuments(Query)");
-    }
-  }
-
-  /**
    * Deletes the document(s) matching any of the provided queries.
    * All given deletes are applied and flushed atomically at the same time.
    *
@@ -2078,25 +2033,30 @@ public class IndexWriter implements Clos
 
   /**
    * Delete all documents in the index.
-   *
-   * <p>This method will drop all buffered documents and will
-   *    remove all segments from the index. This change will not be
-   *    visible until a {@link #commit()} has been called. This method
-   *    can be rolled back using {@link #rollback()}.</p>
-   *
-   * <p>NOTE: this method is much faster than using deleteDocuments( new MatchAllDocsQuery() ). 
-   *    Yet, this method also has different semantics compared to {@link #deleteDocuments(Query)} 
-   *    / {@link #deleteDocuments(Query...)} since internal data-structures are cleared as well 
-   *    as all segment information is forcefully dropped anti-viral semantics like omitting norms
-   *    are reset or doc value types are cleared. Essentially a call to {@link #deleteAll()} is equivalent
-   *    to creating a new {@link IndexWriter} with {@link OpenMode#CREATE} which a delete query only marks
-   *    documents as deleted.</p>
-   *
-   * <p>NOTE: this method will forcefully abort all merges
-   *    in progress.  If other threads are running {@link
-   *    #forceMerge}, {@link #addIndexes(IndexReader[])} or
-   *    {@link #forceMergeDeletes} methods, they may receive
-   *    {@link MergePolicy.MergeAbortedException}s.
+   * 
+   * <p>
+   * This method will drop all buffered documents and will remove all segments
+   * from the index. This change will not be visible until a {@link #commit()}
+   * has been called. This method can be rolled back using {@link #rollback()}.
+   * </p>
+   * 
+   * <p>
+   * NOTE: this method is much faster than using deleteDocuments( new
+   * MatchAllDocsQuery() ). Yet, this method also has different semantics
+   * compared to {@link #deleteDocuments(Query...)} since internal
+   * data-structures are cleared as well as all segment information is
+   * forcefully dropped anti-viral semantics like omitting norms are reset or
+   * doc value types are cleared. Essentially a call to {@link #deleteAll()} is
+   * equivalent to creating a new {@link IndexWriter} with
+   * {@link OpenMode#CREATE} which a delete query only marks documents as
+   * deleted.
+   * </p>
+   * 
+   * <p>
+   * NOTE: this method will forcefully abort all merges in progress. If other
+   * threads are running {@link #forceMerge}, {@link #addIndexes(IndexReader[])}
+   * or {@link #forceMergeDeletes} methods, they may receive
+   * {@link MergePolicy.MergeAbortedException}s.
    */
   public void deleteAll() throws IOException {
     ensureOpen();
@@ -2571,7 +2531,7 @@ public class IndexWriter implements Clos
         }
       }
 
-      SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, 0, -1L, -1L);
+      SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, 0, -1L, -1L, -1L);
 
       info.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
       trackingDir.getCreatedFiles().clear();
@@ -2649,7 +2609,9 @@ public class IndexWriter implements Clos
     SegmentInfo newInfo = new SegmentInfo(directory, info.info.getVersion(), segName, info.info.getDocCount(),
                                           info.info.getUseCompoundFile(), info.info.getCodec(), 
                                           info.info.getDiagnostics());
-    SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(), info.getFieldInfosGen());
+    SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo,
+        info.getDelCount(), info.getDelGen(), info.getFieldInfosGen(),
+        info.getDocValuesGen());
 
     Set<String> segFiles = new HashSet<>();
 
@@ -3772,7 +3734,7 @@ public class IndexWriter implements Clos
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
     setDiagnostics(si, SOURCE_MERGE, details);
-    merge.setInfo(new SegmentCommitInfo(si, 0, -1L, -1L));
+    merge.setInfo(new SegmentCommitInfo(si, 0, -1L, -1L, -1L));
 
 //    System.out.println("[" + Thread.currentThread().getName() + "] IW._mergeInit: " + segString(merge.segments) + " into " + si);
 

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java Thu May 22 11:38:47 2014
@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.Atomi
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.FieldInfosFormat;
 import org.apache.lucene.codecs.LiveDocsFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -38,7 +39,6 @@ import org.apache.lucene.store.IOContext
 import org.apache.lucene.store.TrackingDirectoryWrapper;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.MutableBits;
 
 // Used by IndexWriter to hold open SegmentReaders (for
@@ -294,6 +294,170 @@ class ReadersAndUpdates {
     
     return true;
   }
+  
+  @SuppressWarnings("synthetic-access")
+  private void handleNumericDVUpdates(FieldInfos infos, Map<String,NumericDocValuesFieldUpdates> updates,
+      Directory dir, DocValuesFormat dvFormat, final SegmentReader reader, Map<Integer,Set<String>> fieldFiles) throws IOException {
+    for (Entry<String,NumericDocValuesFieldUpdates> e : updates.entrySet()) {
+      final String field = e.getKey();
+      final NumericDocValuesFieldUpdates fieldUpdates = e.getValue();
+
+      final long nextDocValuesGen = info.getNextDocValuesGen();
+      final String segmentSuffix = Long.toString(nextDocValuesGen, Character.MAX_RADIX);
+      final long estUpdatesSize = fieldUpdates.ramBytesPerDoc() * info.info.getDocCount();
+      final IOContext updatesContext = new IOContext(new FlushInfo(info.info.getDocCount(), estUpdatesSize));
+      final FieldInfo fieldInfo = infos.fieldInfo(field);
+      assert fieldInfo != null;
+      fieldInfo.setDocValuesGen(nextDocValuesGen);
+      final FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { fieldInfo });
+      // separately also track which files were created for this gen
+      final TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
+      final SegmentWriteState state = new SegmentWriteState(null, trackingDir, info.info, fieldInfos, null, updatesContext, segmentSuffix);
+      try (final DocValuesConsumer fieldsConsumer = dvFormat.fieldsConsumer(state)) {
+        // write the numeric updates to a new gen'd docvalues file
+        fieldsConsumer.addNumericField(fieldInfo, new Iterable<Number>() {
+          final NumericDocValues currentValues = reader.getNumericDocValues(field);
+          final Bits docsWithField = reader.getDocsWithField(field);
+          final int maxDoc = reader.maxDoc();
+          final NumericDocValuesFieldUpdates.Iterator updatesIter = fieldUpdates.iterator();
+          @Override
+          public Iterator<Number> iterator() {
+            updatesIter.reset();
+            return new Iterator<Number>() {
+
+              int curDoc = -1;
+              int updateDoc = updatesIter.nextDoc();
+              
+              @Override
+              public boolean hasNext() {
+                return curDoc < maxDoc - 1;
+              }
+
+              @Override
+              public Number next() {
+                if (++curDoc >= maxDoc) {
+                  throw new NoSuchElementException("no more documents to return values for");
+                }
+                if (curDoc == updateDoc) { // this document has an updated value
+                  Long value = updatesIter.value(); // either null (unset value) or updated value
+                  updateDoc = updatesIter.nextDoc(); // prepare for next round
+                  return value;
+                } else {
+                  // no update for this document
+                  assert curDoc < updateDoc;
+                  if (currentValues != null && docsWithField.get(curDoc)) {
+                    // only read the current value if the document had a value before
+                    return currentValues.get(curDoc);
+                  } else {
+                    return null;
+                  }
+                }
+              }
+
+              @Override
+              public void remove() {
+                throw new UnsupportedOperationException("this iterator does not support removing elements");
+              }
+            };
+          }
+        });
+      }
+      info.advanceDocValuesGen();
+      assert !fieldFiles.containsKey(fieldInfo.number);
+      fieldFiles.put(fieldInfo.number, trackingDir.getCreatedFiles());
+    }
+  }
+
+  @SuppressWarnings("synthetic-access")
+  private void handleBinaryDVUpdates(FieldInfos infos, Map<String,BinaryDocValuesFieldUpdates> updates, 
+      TrackingDirectoryWrapper dir, DocValuesFormat dvFormat, final SegmentReader reader, Map<Integer,Set<String>> fieldFiles) throws IOException {
+    for (Entry<String,BinaryDocValuesFieldUpdates> e : updates.entrySet()) {
+      final String field = e.getKey();
+      final BinaryDocValuesFieldUpdates fieldUpdates = e.getValue();
+
+      final long nextDocValuesGen = info.getNextDocValuesGen();
+      final String segmentSuffix = Long.toString(nextDocValuesGen, Character.MAX_RADIX);
+      final long estUpdatesSize = fieldUpdates.ramBytesPerDoc() * info.info.getDocCount();
+      final IOContext updatesContext = new IOContext(new FlushInfo(info.info.getDocCount(), estUpdatesSize));
+      final FieldInfo fieldInfo = infos.fieldInfo(field);
+      assert fieldInfo != null;
+      fieldInfo.setDocValuesGen(nextDocValuesGen);
+      final FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { fieldInfo });
+      // separately also track which files were created for this gen
+      final TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
+      final SegmentWriteState state = new SegmentWriteState(null, trackingDir, info.info, fieldInfos, null, updatesContext, segmentSuffix);
+      try (final DocValuesConsumer fieldsConsumer = dvFormat.fieldsConsumer(state)) {
+        // write the binary updates to a new gen'd docvalues file
+        fieldsConsumer.addBinaryField(fieldInfo, new Iterable<BytesRef>() {
+          final BinaryDocValues currentValues = reader.getBinaryDocValues(field);
+          final Bits docsWithField = reader.getDocsWithField(field);
+          final int maxDoc = reader.maxDoc();
+          final BinaryDocValuesFieldUpdates.Iterator updatesIter = fieldUpdates.iterator();
+          @Override
+          public Iterator<BytesRef> iterator() {
+            updatesIter.reset();
+            return new Iterator<BytesRef>() {
+              
+              int curDoc = -1;
+              int updateDoc = updatesIter.nextDoc();
+              BytesRef scratch = new BytesRef();
+              
+              @Override
+              public boolean hasNext() {
+                return curDoc < maxDoc - 1;
+              }
+              
+              @Override
+              public BytesRef next() {
+                if (++curDoc >= maxDoc) {
+                  throw new NoSuchElementException("no more documents to return values for");
+                }
+                if (curDoc == updateDoc) { // this document has an updated value
+                  BytesRef value = updatesIter.value(); // either null (unset value) or updated value
+                  updateDoc = updatesIter.nextDoc(); // prepare for next round
+                  return value;
+                } else {
+                  // no update for this document
+                  assert curDoc < updateDoc;
+                  if (currentValues != null && docsWithField.get(curDoc)) {
+                    // only read the current value if the document had a value before
+                    currentValues.get(curDoc, scratch);
+                    return scratch;
+                  } else {
+                    return null;
+                  }
+                }
+              }
+              
+              @Override
+              public void remove() {
+                throw new UnsupportedOperationException("this iterator does not support removing elements");
+              }
+            };
+          }
+        });
+      }
+      info.advanceDocValuesGen();
+      assert !fieldFiles.containsKey(fieldInfo.number);
+      fieldFiles.put(fieldInfo.number, trackingDir.getCreatedFiles());
+    }
+  }
+  
+  private Set<String> writeFieldInfosGen(FieldInfos fieldInfos, Directory dir, DocValuesFormat dvFormat, 
+      FieldInfosFormat infosFormat) throws IOException {
+    final long nextFieldInfosGen = info.getNextFieldInfosGen();
+    final String segmentSuffix = Long.toString(nextFieldInfosGen, Character.MAX_RADIX);
+    // we write approximately that many bytes (based on Lucene46DVF):
+    // HEADER + FOOTER: 40
+    // 90 bytes per-field (over estimating long name and attributes map)
+    final long estInfosSize = 40 + 90 * fieldInfos.size();
+    final IOContext infosContext = new IOContext(new FlushInfo(info.info.getDocCount(), estInfosSize));
+    // separately also track which files were created for this gen
+    final TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
+    infosFormat.getFieldInfosWriter().write(trackingDir, info.info.name, segmentSuffix, fieldInfos, infosContext);
+    info.advanceFieldInfosGen();
+    return trackingDir.getCreatedFiles();
+  }
 
   // Writes field updates (new _X_N updates files) to the directory
   public synchronized void writeFieldUpdates(Directory dir, DocValuesFieldUpdates.Container dvUpdates) throws IOException {
@@ -307,6 +471,8 @@ class ReadersAndUpdates {
     // it:
     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
     
+    final Map<Integer,Set<String>> newDVFiles = new HashMap<>();
+    Set<String> fieldInfosFiles = null;
     FieldInfos fieldInfos = null;
     boolean success = false;
     try {
@@ -341,147 +507,16 @@ class ReadersAndUpdates {
         }
         
         fieldInfos = builder.finish();
-        final long nextFieldInfosGen = info.getNextFieldInfosGen();
-        final String segmentSuffix = Long.toString(nextFieldInfosGen, Character.MAX_RADIX);
-        final long estUpdatesSize = dvUpdates.ramBytesPerDoc() * info.info.getDocCount();
-        final IOContext updatesContext = new IOContext(new FlushInfo(info.info.getDocCount(), estUpdatesSize));
-        final SegmentWriteState state = new SegmentWriteState(null, trackingDir, info.info, fieldInfos, null, updatesContext, segmentSuffix);
         final DocValuesFormat docValuesFormat = codec.docValuesFormat();
-        final DocValuesConsumer fieldsConsumer = docValuesFormat.fieldsConsumer(state);
-        boolean fieldsConsumerSuccess = false;
-        try {
+        
 //          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeFieldUpdates: applying numeric updates; seg=" + info + " updates=" + numericFieldUpdates);
-          for (Entry<String,NumericDocValuesFieldUpdates> e : dvUpdates.numericDVUpdates.entrySet()) {
-            final String field = e.getKey();
-            final NumericDocValuesFieldUpdates fieldUpdates = e.getValue();
-            final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
-            assert fieldInfo != null;
-
-            fieldInfo.setDocValuesGen(nextFieldInfosGen);
-            // write the numeric updates to a new gen'd docvalues file
-            fieldsConsumer.addNumericField(fieldInfo, new Iterable<Number>() {
-              final NumericDocValues currentValues = reader.getNumericDocValues(field);
-              final Bits docsWithField = reader.getDocsWithField(field);
-              final int maxDoc = reader.maxDoc();
-              final NumericDocValuesFieldUpdates.Iterator updatesIter = fieldUpdates.iterator();
-              @Override
-              public Iterator<Number> iterator() {
-                updatesIter.reset();
-                return new Iterator<Number>() {
-
-                  int curDoc = -1;
-                  int updateDoc = updatesIter.nextDoc();
-                  
-                  @Override
-                  public boolean hasNext() {
-                    return curDoc < maxDoc - 1;
-                  }
-
-                  @Override
-                  public Number next() {
-                    if (++curDoc >= maxDoc) {
-                      throw new NoSuchElementException("no more documents to return values for");
-                    }
-                    if (curDoc == updateDoc) { // this document has an updated value
-                      Long value = updatesIter.value(); // either null (unset value) or updated value
-                      updateDoc = updatesIter.nextDoc(); // prepare for next round
-                      return value;
-                    } else {
-                      // no update for this document
-                      assert curDoc < updateDoc;
-                      if (currentValues != null && docsWithField.get(curDoc)) {
-                        // only read the current value if the document had a value before
-                        return currentValues.get(curDoc);
-                      } else {
-                        return null;
-                      }
-                    }
-                  }
-
-                  @Override
-                  public void remove() {
-                    throw new UnsupportedOperationException("this iterator does not support removing elements");
-                  }
-                };
-              }
-            });
-          }
-
+        handleNumericDVUpdates(fieldInfos, dvUpdates.numericDVUpdates, trackingDir, docValuesFormat, reader, newDVFiles);
+        
 //        System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " updates=" + dvUpdates.binaryDVUpdates);
-        for (Entry<String,BinaryDocValuesFieldUpdates> e : dvUpdates.binaryDVUpdates.entrySet()) {
-          final String field = e.getKey();
-          final BinaryDocValuesFieldUpdates dvFieldUpdates = e.getValue();
-          final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
-          assert fieldInfo != null;
-
-//          System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates);
-
-          fieldInfo.setDocValuesGen(nextFieldInfosGen);
-          // write the numeric updates to a new gen'd docvalues file
-          fieldsConsumer.addBinaryField(fieldInfo, new Iterable<BytesRef>() {
-            final BinaryDocValues currentValues = reader.getBinaryDocValues(field);
-            final Bits docsWithField = reader.getDocsWithField(field);
-            final int maxDoc = reader.maxDoc();
-            final BinaryDocValuesFieldUpdates.Iterator updatesIter = dvFieldUpdates.iterator();
-            @Override
-            public Iterator<BytesRef> iterator() {
-              updatesIter.reset();
-              return new Iterator<BytesRef>() {
-
-                int curDoc = -1;
-                int updateDoc = updatesIter.nextDoc();
-                BytesRef scratch = new BytesRef();
-                
-                @Override
-                public boolean hasNext() {
-                  return curDoc < maxDoc - 1;
-                }
+        handleBinaryDVUpdates(fieldInfos, dvUpdates.binaryDVUpdates, trackingDir, docValuesFormat, reader, newDVFiles);
 
-                @Override
-                public BytesRef next() {
-                  if (++curDoc >= maxDoc) {
-                    throw new NoSuchElementException("no more documents to return values for");
-                  }
-                  if (curDoc == updateDoc) { // this document has an updated value
-                    BytesRef value = updatesIter.value(); // either null (unset value) or updated value
-                    updateDoc = updatesIter.nextDoc(); // prepare for next round
-                    return value;
-                  } else {
-                    // no update for this document
-                    assert curDoc < updateDoc;
-                    if (currentValues != null && docsWithField.get(curDoc)) {
-                      // only read the current value if the document had a value before
-                      currentValues.get(curDoc, scratch);
-                      return scratch;
-                    } else {
-                      return null;
-                    }
-                  }
-                }
-
-                @Override
-                public void remove() {
-                  throw new UnsupportedOperationException("this iterator does not support removing elements");
-                }
-              };
-            }
-          });
-        }
-        
-        // we write approximately that many bytes (based on Lucene46DVF):
-        // HEADER + FOOTER: 40
-        // 90 bytes per-field (over estimating long name and attributes map)
-        final long estInfosSize = 40 + 90 * fieldInfos.size();
-        final IOContext infosContext = new IOContext(new FlushInfo(info.info.getDocCount(), estInfosSize));
-        codec.fieldInfosFormat().getFieldInfosWriter().write(trackingDir, info.info.name, segmentSuffix, fieldInfos, infosContext);
-        fieldsConsumerSuccess = true;
-        } finally {
-          if (fieldsConsumerSuccess) {
-            fieldsConsumer.close();
-          } else {
-            IOUtils.closeWhileHandlingException(fieldsConsumer);
-          }
-        }
+//        System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: write fieldInfos; seg=" + info);
+        fieldInfosFiles = writeFieldInfosGen(fieldInfos, trackingDir, docValuesFormat, codec.fieldInfosFormat());
       } finally {
         if (reader != this.reader) {
 //          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeLiveDocs: closeReader " + reader);
@@ -492,9 +527,10 @@ class ReadersAndUpdates {
       success = true;
     } finally {
       if (!success) {
-        // Advance only the nextWriteFieldInfosGen so that a 2nd
-        // attempt to write will write to a new file
+        // Advance only the nextWriteFieldInfosGen and nextWriteDocValuesGen, so
+        // that a 2nd attempt to write will write to a new file
         info.advanceNextWriteFieldInfosGen();
+        info.advanceNextWriteDocValuesGen();
         
         // Delete any partially created file(s):
         for (String fileName : trackingDir.getCreatedFiles()) {
@@ -507,7 +543,6 @@ class ReadersAndUpdates {
       }
     }
     
-    info.advanceFieldInfosGen();
     // copy all the updates to mergingUpdates, so they can later be applied to the merged segment
     if (isMerging) {
       for (Entry<String,NumericDocValuesFieldUpdates> e : dvUpdates.numericDVUpdates.entrySet()) {
@@ -528,22 +563,21 @@ class ReadersAndUpdates {
       }
     }
     
-    // create a new map, keeping only the gens that are in use
-    Map<Long,Set<String>> genUpdatesFiles = info.getUpdatesFiles();
-    Map<Long,Set<String>> newGenUpdatesFiles = new HashMap<>();
-    final long fieldInfosGen = info.getFieldInfosGen();
-    for (FieldInfo fi : fieldInfos) {
-      long dvGen = fi.getDocValuesGen();
-      if (dvGen != -1 && !newGenUpdatesFiles.containsKey(dvGen)) {
-        if (dvGen == fieldInfosGen) {
-          newGenUpdatesFiles.put(fieldInfosGen, trackingDir.getCreatedFiles());
-        } else {
-          newGenUpdatesFiles.put(dvGen, genUpdatesFiles.get(dvGen));
-        }
+    // writing field updates succeeded
+    assert fieldInfosFiles != null;
+    info.setFieldInfosFiles(fieldInfosFiles);
+    
+    // update the doc-values updates files. the map associates each field with
+    // its set of files; we copy over from the existing map all fields with
+    // updates that were not touched in this session, and add new mappings for
+    // the fields that were updated now.
+    assert !newDVFiles.isEmpty();
+    for (Entry<Integer,Set<String>> e : info.getDocValuesUpdatesFiles().entrySet()) {
+      if (!newDVFiles.containsKey(e.getKey())) {
+        newDVFiles.put(e.getKey(), e.getValue());
       }
     }
-    
-    info.setGenUpdatesFiles(newGenUpdatesFiles);
+    info.setDocValuesUpdatesFiles(newDVFiles);
     
     // wrote new files, should checkpoint()
     writer.checkpoint();

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java Thu May 22 11:38:47 2014
@@ -51,11 +51,27 @@ public class SegmentCommitInfo {
   // Generation number of the FieldInfos (-1 if there are no updates)
   private long fieldInfosGen;
   
-  // Normally 1 + fieldInfosGen, unless an exception was hit on last attempt to
+  // Normally 1+fieldInfosGen, unless an exception was hit on last attempt to
   // write
   private long nextWriteFieldInfosGen;
+  
+  // Generation number of the DocValues (-1 if there are no updates)
+  private long docValuesGen;
+  
+  // Normally 1+docValuesGen, unless an exception was hit on last attempt to
+  // write
+  private long nextWriteDocValuesGen;
 
+  // Track the per-field DocValues update files
+  private final Map<Integer,Set<String>> dvUpdatesFiles = new HashMap<>();
+  
+  // TODO should we add .files() to FieldInfosFormat, like we have on
+  // LiveDocsFormat?
+  // track the fieldInfos update files
+  private final Set<String> fieldInfosFiles = new HashSet<>();
+  
   // Track the per-generation updates files
+  @Deprecated
   private final Map<Long,Set<String>> genUpdatesFiles = new HashMap<>();
   
   private volatile long sizeInBytes = -1;
@@ -71,36 +87,53 @@ public class SegmentCommitInfo {
    *          deletion generation number (used to name deletion files)
    * @param fieldInfosGen
    *          FieldInfos generation number (used to name field-infos files)
-   **/
-  public SegmentCommitInfo(SegmentInfo info, int delCount, long delGen, long fieldInfosGen) {
+   * @param docValuesGen
+   *          DocValues generation number (used to name doc-values updates files)
+   */
+  public SegmentCommitInfo(SegmentInfo info, int delCount, long delGen, long fieldInfosGen, long docValuesGen) {
     this.info = info;
     this.delCount = delCount;
     this.delGen = delGen;
-    if (delGen == -1) {
-      nextWriteDelGen = 1;
-    } else {
-      nextWriteDelGen = delGen+1;
-    }
-    
+    this.nextWriteDelGen = delGen == -1 ? 1 : delGen + 1;
     this.fieldInfosGen = fieldInfosGen;
-    if (fieldInfosGen == -1) {
-      nextWriteFieldInfosGen = 1;
-    } else {
-      nextWriteFieldInfosGen = fieldInfosGen + 1;
-    }
+    this.nextWriteFieldInfosGen = fieldInfosGen == -1 ? 1 : fieldInfosGen + 1;
+    this.docValuesGen = docValuesGen;
+    this.nextWriteDocValuesGen = docValuesGen == -1 ? 1 : docValuesGen + 1;
   }
 
-  /** Returns the per generation updates files. */
-  public Map<Long,Set<String>> getUpdatesFiles() {
-    return Collections.unmodifiableMap(genUpdatesFiles);
-  }
-  
-  /** Sets the updates file names per generation. Does not deep clone the map. */
+  /**
+   * Sets the updates file names per generation. Does not deep clone the map.
+   * 
+   * @deprecated required to support 4.6-4.8 indexes.
+   */
+  @Deprecated
   public void setGenUpdatesFiles(Map<Long,Set<String>> genUpdatesFiles) {
     this.genUpdatesFiles.clear();
     this.genUpdatesFiles.putAll(genUpdatesFiles);
   }
   
+  /** Returns the per-field DocValues updates files. */
+  public Map<Integer,Set<String>> getDocValuesUpdatesFiles() {
+    return Collections.unmodifiableMap(dvUpdatesFiles);
+  }
+  
+  /** Sets the DocValues updates file names, per field number. Does not deep clone the map. */
+  public void setDocValuesUpdatesFiles(Map<Integer,Set<String>> dvUpdatesFiles) {
+    this.dvUpdatesFiles.clear();
+    this.dvUpdatesFiles.putAll(dvUpdatesFiles);
+  }
+  
+  /** Returns the FieldInfos file names. */
+  public Set<String> getFieldInfosFiles() {
+    return Collections.unmodifiableSet(fieldInfosFiles);
+  }
+  
+  /** Sets the FieldInfos file names. */
+  public void setFieldInfosFiles(Set<String> fieldInfosFiles) {
+    this.fieldInfosFiles.clear();
+    this.fieldInfosFiles.addAll(fieldInfosFiles);
+  }
+
   /** Called when we succeed in writing deletes */
   void advanceDelGen() {
     delGen = nextWriteDelGen;
@@ -129,6 +162,21 @@ public class SegmentCommitInfo {
   void advanceNextWriteFieldInfosGen() {
     nextWriteFieldInfosGen++;
   }
+  
+  /** Called when we succeed in writing a new DocValues generation. */
+  void advanceDocValuesGen() {
+    docValuesGen = nextWriteDocValuesGen;
+    nextWriteDocValuesGen = docValuesGen + 1;
+    sizeInBytes = -1;
+  }
+  
+  /**
+   * Called if there was an exception while writing a new generation of
+   * DocValues, so that we don't try to write to the same file more than once.
+   */
+  void advanceNextWriteDocValuesGen() {
+    nextWriteDocValuesGen++;
+  }
 
   /** Returns total size in bytes of all files for this
    *  segment. */
@@ -155,11 +203,20 @@ public class SegmentCommitInfo {
     // Must separately add any live docs files:
     info.getCodec().liveDocsFormat().files(this, files);
 
-    // Must separately add any field updates files
+    // Must separately add any per-gen updates files. This can go away when we
+    // get rid of genUpdatesFiles (6.0)
     for (Set<String> updateFiles : genUpdatesFiles.values()) {
       files.addAll(updateFiles);
     }
     
+    // must separately add any field updates files
+    for (Set<String> updatefiles : dvUpdatesFiles.values()) {
+      files.addAll(updatefiles);
+    }
+    
+    // must separately add fieldInfos files
+    files.addAll(fieldInfosFiles);
+    
     return files;
   }
 
@@ -200,6 +257,19 @@ public class SegmentCommitInfo {
     return fieldInfosGen;
   }
   
+  /** Returns the next available generation number of the DocValues files. */
+  public long getNextDocValuesGen() {
+    return nextWriteDocValuesGen;
+  }
+  
+  /**
+   * Returns the generation number of the DocValues file or -1 if there are no
+   * doc-values updates yet.
+   */
+  public long getDocValuesGen() {
+    return docValuesGen;
+  }
+  
   /**
    * Returns the next available generation number
    * of the live docs file.
@@ -239,6 +309,9 @@ public class SegmentCommitInfo {
     if (fieldInfosGen != -1) {
       s += ":fieldInfosGen=" + fieldInfosGen;
     }
+    if (docValuesGen != -1) {
+      s += ":dvGen=" + docValuesGen;
+    }
     return s;
   }
 
@@ -249,19 +322,27 @@ public class SegmentCommitInfo {
 
   @Override
   public SegmentCommitInfo clone() {
-    SegmentCommitInfo other = new SegmentCommitInfo(info, delCount, delGen, fieldInfosGen);
+    SegmentCommitInfo other = new SegmentCommitInfo(info, delCount, delGen, fieldInfosGen, docValuesGen);
     // Not clear that we need to carry over nextWriteDelGen
     // (i.e. do we ever clone after a failed write and
     // before the next successful write?), but just do it to
     // be safe:
     other.nextWriteDelGen = nextWriteDelGen;
     other.nextWriteFieldInfosGen = nextWriteFieldInfosGen;
+    other.nextWriteDocValuesGen = nextWriteDocValuesGen;
     
     // deep clone
     for (Entry<Long,Set<String>> e : genUpdatesFiles.entrySet()) {
       other.genUpdatesFiles.put(e.getKey(), new HashSet<>(e.getValue()));
     }
     
+    // deep clone
+    for (Entry<Integer,Set<String>> e : dvUpdatesFiles.entrySet()) {
+      other.dvUpdatesFiles.put(e.getKey(), new HashSet<>(e.getValue()));
+    }
+    
+    other.fieldInfosFiles.addAll(fieldInfosFiles);
+    
     return other;
   }
 }

Modified: lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java?rev=1596817&r1=1596816&r2=1596817&view=diff
==============================================================================
--- lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java (original)
+++ lucene/dev/branches/lucene5675/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java Thu May 22 11:38:47 2014
@@ -38,7 +38,7 @@ final class SegmentDocValues {
   private final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<>();
 
   private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
-      DocValuesFormat dvFormat, final Long gen, List<FieldInfo> infos) throws IOException {
+      DocValuesFormat dvFormat, final Long gen, FieldInfos infos) throws IOException {
     Directory dvDir = dir;
     String segmentSuffix = "";
     if (gen.longValue() != -1) {
@@ -47,7 +47,7 @@ final class SegmentDocValues {
     }
 
     // set SegmentReadState to list only the fields that are relevant to that gen
-    SegmentReadState srs = new SegmentReadState(dvDir, si.info, new FieldInfos(infos.toArray(new FieldInfo[infos.size()])), context, segmentSuffix);
+    SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, context, segmentSuffix);
     return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
       @SuppressWarnings("synthetic-access")
       @Override
@@ -62,7 +62,7 @@ final class SegmentDocValues {
 
   /** Returns the {@link DocValuesProducer} for the given generation. */
   synchronized DocValuesProducer getDocValuesProducer(long gen, SegmentCommitInfo si, IOContext context, Directory dir, 
-      DocValuesFormat dvFormat, List<FieldInfo> infos) throws IOException {
+      DocValuesFormat dvFormat, FieldInfos infos) throws IOException {
     RefCount<DocValuesProducer> dvp = genDVProducers.get(gen);
     if (dvp == null) {
       dvp = newDocValuesProducer(si, context, dir, dvFormat, gen, infos);



Mime
View raw message