lucene-java-commits mailing list archives

From: markrmil...@apache.org
Subject: svn commit: r827772 [6/6] - in /lucene/java/branches/flex_1458: ./ contrib/ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/ contrib/misc/src/java/org/apache/lucene/queryParser/precedence/ contrib/queries/src/java/org/apache/lucene/s...
Date: Tue, 20 Oct 2009 19:58:22 GMT
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleLRUCache.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleLRUCache.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleLRUCache.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleLRUCache.java Tue Oct 20 19:58:18 2009
@@ -26,24 +26,19 @@
  * if needed.
  * 
  */
-public class SimpleLRUCache extends SimpleMapCache {
+public class SimpleLRUCache<K,V> extends SimpleMapCache<K,V> {
   private final static float LOADFACTOR = 0.75f;
 
-  private int cacheSize;
-
   /**
    * Creates a last-recently-used cache with the specified size. 
    */
-  public SimpleLRUCache(int cacheSize) {
-    super(null);
-    this.cacheSize = cacheSize;
-    int capacity = (int) Math.ceil(cacheSize / LOADFACTOR) + 1;
-
-    super.map = new LinkedHashMap(capacity, LOADFACTOR, true) {
-      protected boolean removeEldestEntry(Map.Entry eldest) {
-        return size() > SimpleLRUCache.this.cacheSize;
+  public SimpleLRUCache(final int cacheSize) {
+    super(new LinkedHashMap<K,V>((int) Math.ceil(cacheSize / LOADFACTOR) + 1, LOADFACTOR, true) {
+      @Override
+      protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+        return size() > cacheSize;
       }
-    };
+    });
   }
 
 }
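
For reference, the rewritten constructor relies on java.util.LinkedHashMap's access-order mode: the third constructor argument (true) makes iteration order track recency of access, and overriding removeEldestEntry evicts the least-recently-used entry once the map exceeds cacheSize. A minimal standalone sketch of that idiom (class and field names are illustrative, not from the Lucene source):

import java.util.LinkedHashMap;
import java.util.Map;

class TinyLruCache<K,V> {
  private static final float LOAD_FACTOR = 0.75f;
  private final Map<K,V> map;

  TinyLruCache(final int cacheSize) {
    // capacity chosen so the map never rehashes before eviction kicks in
    int capacity = (int) Math.ceil(cacheSize / LOAD_FACTOR) + 1;
    map = new LinkedHashMap<K,V>(capacity, LOAD_FACTOR, true) { // true = access order
      @Override
      protected boolean removeEldestEntry(Map.Entry<K,V> eldest) {
        return size() > cacheSize; // drop the least-recently-used entry past the limit
      }
    };
  }

  V get(K key) { return map.get(key); }
  void put(K key, V value) { map.put(key, value); }
}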

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleMapCache.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleMapCache.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleMapCache.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/cache/SimpleMapCache.java Tue Oct 20 19:58:18 2009
@@ -26,29 +26,33 @@
  * This cache is not synchronized, use {@link Cache#synchronizedCache(Cache)}
  * if needed.
  */
-public class SimpleMapCache extends Cache {
-  Map map;
+public class SimpleMapCache<K,V> extends Cache<K,V> {
+  protected Map<K,V> map;
   
   public SimpleMapCache() {
-    this(new HashMap());
+    this(new HashMap<K,V>());
   }
 
-  public SimpleMapCache(Map map) {
+  public SimpleMapCache(Map<K,V> map) {
     this.map = map;
   }
   
-  public Object get(Object key) {
+  @Override
+  public V get(Object key) {
     return map.get(key);
   }
 
-  public void put(Object key, Object value) {
+  @Override
+  public void put(K key, V value) {
     map.put(key, value);
   }
 
+  @Override
   public void close() {
     // NOOP
   }
 
+  @Override
   public boolean containsKey(Object key) {
     return map.containsKey(key);
   }
@@ -56,44 +60,51 @@
   /**
    * Returns a Set containing all keys in this cache.
    */
-  public Set keySet() {
+  public Set<K> keySet() {
     return map.keySet();
   }
   
-  Cache getSynchronizedCache() {
-    return new SynchronizedSimpleMapCache(this);
+  @Override
+  Cache<K,V> getSynchronizedCache() {
+    return new SynchronizedSimpleMapCache<K,V>(this);
   }
   
-  private static class SynchronizedSimpleMapCache extends SimpleMapCache {
-    Object mutex;
-    SimpleMapCache cache;
+  private static class SynchronizedSimpleMapCache<K,V> extends SimpleMapCache<K,V> {
+    private Object mutex;
+    private SimpleMapCache<K,V> cache;
     
-    SynchronizedSimpleMapCache(SimpleMapCache cache) {
+    SynchronizedSimpleMapCache(SimpleMapCache<K,V> cache) {
         this.cache = cache;
         this.mutex = this;
     }
     
-    public void put(Object key, Object value) {
+    @Override
+    public void put(K key, V value) {
         synchronized(mutex) {cache.put(key, value);}
     }
     
-    public Object get(Object key) {
+    @Override
+    public V get(Object key) {
         synchronized(mutex) {return cache.get(key);}
     }
     
+    @Override
     public boolean containsKey(Object key) {
         synchronized(mutex) {return cache.containsKey(key);}
     }
     
+    @Override
     public void close() {
         synchronized(mutex) {cache.close();}
     }
     
-    public Set keySet() {
+    @Override
+    public Set<K> keySet() {
       synchronized(mutex) {return cache.keySet();}
     }
     
-    Cache getSynchronizedCache() {
+    @Override
+    Cache<K,V> getSynchronizedCache() {
       return this;
     }
   }
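
The SynchronizedSimpleMapCache above is a straightforward decorator: every call delegates to the wrapped cache while holding one mutex (the wrapper itself), which is what Cache#synchronizedCache(Cache) hands back for thread-shared use. A small sketch of the same pattern with illustrative names (not the Lucene classes):

import java.util.HashMap;
import java.util.Map;

class SyncCache<K,V> {
  private final Map<K,V> delegate = new HashMap<K,V>();
  private final Object mutex = this; // one monitor guards every delegated call

  public V get(K key) {
    synchronized (mutex) { return delegate.get(key); }
  }

  public void put(K key, V value) {
    synchronized (mutex) { delegate.put(key, value); }
  }

  public boolean containsKey(K key) {
    synchronized (mutex) { return delegate.containsKey(key); }
  }
}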

Propchange: lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 20 19:58:18 2009
@@ -1,2 +1,2 @@
 /lucene/java/branches/lucene_2_4/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:748824
-/lucene/java/trunk/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:824912-825292,826213
+/lucene/java/trunk/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:824912-825292,826213-827705

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestToken.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestToken.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestToken.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestToken.java Tue Oct 20 19:58:18 2009
@@ -150,7 +150,7 @@
     t.setTermBuffer(b, 0, 5);
     assertEquals("(aloha,0,5)", t.toString());
 
-    t.setTermText("hi there");
+    t.setTermBuffer("hi there");
     assertEquals("(hi there,0,5)", t.toString());
   }
 
@@ -171,20 +171,17 @@
   
   public void testMixedStringArray() throws Exception {
     Token t = new Token("hello", 0, 5);
-    assertEquals(t.termText(), "hello");
     assertEquals(t.termLength(), 5);
     assertEquals(t.term(), "hello");
-    t.setTermText("hello2");
+    t.setTermBuffer("hello2");
     assertEquals(t.termLength(), 6);
     assertEquals(t.term(), "hello2");
     t.setTermBuffer("hello3".toCharArray(), 0, 6);
-    assertEquals(t.termText(), "hello3");
+    assertEquals(t.term(), "hello3");
 
-    // Make sure if we get the buffer and change a character
-    // that termText() reflects the change
     char[] buffer = t.termBuffer();
     buffer[1] = 'o';
-    assertEquals(t.termText(), "hollo3");
+    assertEquals(t.term(), "hollo3");
   }
   
   public void testClone() throws Exception {

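The TestToken changes track the Token API cleanup on this branch: the deprecated termText()/setTermText(String) accessors give way to term() and setTermBuffer(String), which work against the token's reusable char[] buffer. A short usage sketch of the calls the updated assertions rely on (assumes the lucene-core jar from this branch on the classpath):

import org.apache.lucene.analysis.Token;

public class TokenBufferExample {
  public static void main(String[] args) {
    Token t = new Token("hello", 0, 5); // term text plus start/end offsets
    t.setTermBuffer("hello2");          // replaces the deprecated setTermText("hello2")
    System.out.println(t.term());       // "hello2" -- replaces termText()

    char[] buf = t.termBuffer();        // direct access to the underlying buffer
    buf[1] = 'o';
    System.out.println(t.term());       // "hollo2" -- term() reflects in-place buffer edits
  }
}
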
Propchange: lucene/java/branches/flex_1458/src/test/org/apache/lucene/document/TestDateTools.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 20 19:58:18 2009
@@ -1,2 +1,2 @@
 /lucene/java/branches/lucene_2_4/src/test/org/apache/lucene/document/TestDateTools.java:748824
-/lucene/java/trunk/src/test/org/apache/lucene/document/TestDateTools.java:824912-825292,826213
+/lucene/java/trunk/src/test/org/apache/lucene/document/TestDateTools.java:824912-825292,826213-827705

Propchange: lucene/java/branches/flex_1458/src/test/org/apache/lucene/document/TestNumberTools.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 20 19:58:18 2009
@@ -1,2 +1,2 @@
 /lucene/java/branches/lucene_2_4/src/test/org/apache/lucene/document/TestNumberTools.java:748824
-/lucene/java/trunk/src/test/org/apache/lucene/document/TestNumberTools.java:824912-825292,826213
+/lucene/java/trunk/src/test/org/apache/lucene/document/TestNumberTools.java:824912-825292,826213-827705

Propchange: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 20 19:58:18 2009
@@ -1,2 +1,2 @@
 /lucene/java/branches/lucene_2_4/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java:748824
-/lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java:824912-825292,826213
+/lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java:824912-825292,826213-827705

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderClone.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderClone.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderClone.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderClone.java Tue Oct 20 19:58:18 2009
@@ -335,7 +335,7 @@
     origSegmentReader.close();
     assertDelDocsRefCountEquals(1, origSegmentReader);
     // check the norm refs
-    Norm norm = (Norm) clonedSegmentReader.norms.get("field1");
+    Norm norm = clonedSegmentReader.norms.get("field1");
     assertEquals(1, norm.bytesRef().refCount());
     clonedSegmentReader.close();
     dir1.close();

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Tue Oct 20 19:58:18 2009
@@ -170,7 +170,7 @@
     TestIndexReaderReopen.createIndex(dir1, false);
     SegmentReader reader1 = SegmentReader.getOnlySegmentReader(dir1);
     reader1.norms("field1");
-    Norm r1norm = (Norm)reader1.norms.get("field1");
+    Norm r1norm = reader1.norms.get("field1");
     SegmentReader.Ref r1BytesRef = r1norm.bytesRef();
     SegmentReader reader2 = (SegmentReader)reader1.clone();
     assertEquals(2, r1norm.bytesRef().refCount());
@@ -189,14 +189,14 @@
     IndexReader reader2C = (IndexReader) reader1.clone();
     SegmentReader segmentReader2C = SegmentReader.getOnlySegmentReader(reader2C);
     segmentReader2C.norms("field1"); // load the norms for the field
-    Norm reader2CNorm = (Norm)segmentReader2C.norms.get("field1");
+    Norm reader2CNorm = segmentReader2C.norms.get("field1");
     assertTrue("reader2CNorm.bytesRef()=" + reader2CNorm.bytesRef(), reader2CNorm.bytesRef().refCount() == 2);
     
     
     
     IndexReader reader3C = (IndexReader) reader2C.clone();
     SegmentReader segmentReader3C = SegmentReader.getOnlySegmentReader(reader3C);
-    Norm reader3CCNorm = (Norm)segmentReader3C.norms.get("field1");
+    Norm reader3CCNorm = segmentReader3C.norms.get("field1");
     assertEquals(3, reader3CCNorm.bytesRef().refCount());
     
     // edit a norm and the refcount should be 1
@@ -215,13 +215,13 @@
     
     // norm values should be different 
     assertTrue(Similarity.decodeNorm(segmentReader3C.norms("field1")[5]) != Similarity.decodeNorm(segmentReader4C.norms("field1")[5]));
-    Norm reader4CCNorm = (Norm)segmentReader4C.norms.get("field1");
+    Norm reader4CCNorm = segmentReader4C.norms.get("field1");
     assertEquals(3, reader3CCNorm.bytesRef().refCount());
     assertEquals(1, reader4CCNorm.bytesRef().refCount());
         
     IndexReader reader5C = (IndexReader) reader4C.clone();
     SegmentReader segmentReader5C = SegmentReader.getOnlySegmentReader(reader5C);
-    Norm reader5CCNorm = (Norm)segmentReader5C.norms.get("field1");
+    Norm reader5CCNorm = segmentReader5C.norms.get("field1");
     reader5C.setNorm(5, "field1", 0.7f);
     assertEquals(1, reader5CCNorm.bytesRef().refCount());    
 

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java Tue Oct 20 19:58:18 2009
@@ -2699,8 +2699,6 @@
     failure.setDoFail();
 
     ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-    // We expect sync exceptions in the merge threads
-    cms.setSuppressExceptions();
     writer.setMergeScheduler(cms);
     writer.setMaxBufferedDocs(2);
     writer.setMergeFactor(5);

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java Tue Oct 20 19:58:18 2009
@@ -37,185 +37,221 @@
 import org.apache.lucene.search.Searcher;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.spans.SpanQuery;
+import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.LuceneTestCase;
 
 
 public class TestPayloadNearQuery extends LuceneTestCase {
-	private IndexSearcher searcher;
-	private BoostingSimilarity similarity = new BoostingSimilarity();
-	private byte[] payload2 = new byte[]{2};
-	private byte[] payload4 = new byte[]{4};
-
-	public TestPayloadNearQuery(String s) {
-		super(s);
-	}
-
-	private class PayloadAnalyzer extends Analyzer {
-		public TokenStream tokenStream(String fieldName, Reader reader) {
-			TokenStream result = new LowerCaseTokenizer(reader);
-			result = new PayloadFilter(result, fieldName);
-			return result;
-		}
-	}
-
-	private class PayloadFilter extends TokenFilter {
-		String fieldName;
-		int numSeen = 0;
+  private IndexSearcher searcher;
+  private BoostingSimilarity similarity = new BoostingSimilarity();
+  private byte[] payload2 = new byte[]{2};
+  private byte[] payload4 = new byte[]{4};
+
+  public TestPayloadNearQuery(String s) {
+    super(s);
+  }
+
+  private class PayloadAnalyzer extends Analyzer {
+    public TokenStream tokenStream(String fieldName, Reader reader) {
+      TokenStream result = new LowerCaseTokenizer(reader);
+      result = new PayloadFilter(result, fieldName);
+      return result;
+    }
+  }
+
+  private class PayloadFilter extends TokenFilter {
+    String fieldName;
+    int numSeen = 0;
     protected PayloadAttribute payAtt;
 
-		public PayloadFilter(TokenStream input, String fieldName) {
-			super(input);
-			this.fieldName = fieldName;
+    public PayloadFilter(TokenStream input, String fieldName) {
+      super(input);
+      this.fieldName = fieldName;
       payAtt = addAttribute(PayloadAttribute.class);
-		}
+    }
 
     public boolean incrementToken() throws IOException {
       boolean result = false;
       if (input.incrementToken() == true){
         if (numSeen % 2 == 0) {
-					payAtt.setPayload(new Payload(payload2));
-				} else {
-					payAtt.setPayload(new Payload(payload4));
-				}
-				numSeen++;
+          payAtt.setPayload(new Payload(payload2));
+        } else {
+          payAtt.setPayload(new Payload(payload4));
+        }
+        numSeen++;
         result = true;
       }
       return result;
     }
   }
   
-	private PayloadNearQuery newPhraseQuery (String fieldName, String phrase, boolean inOrder) {
-		int n;
-		String[] words = phrase.split("[\\s]+");
-		SpanQuery clauses[] = new SpanQuery[words.length];
-		for (int i=0;i<clauses.length;i++) {
-			clauses[i] = new PayloadTermQuery(new Term(fieldName, words[i]), new AveragePayloadFunction());
 
-		} 
-		return new PayloadNearQuery(clauses, 0, inOrder);
-	}
-
-	protected void setUp() throws Exception {
-		super.setUp();
-		RAMDirectory directory = new RAMDirectory();
-		PayloadAnalyzer analyzer = new PayloadAnalyzer();
-		IndexWriter writer
-		= new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-		writer.setSimilarity(similarity);
-		//writer.infoStream = System.out;
-		for (int i = 0; i < 1000; i++) {
-			Document doc = new Document();
-			doc.add(new Field("field", English.intToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
-			writer.addDocument(doc);
-		}
-		writer.optimize();
-		writer.close();
-
-		searcher = new IndexSearcher(directory, true);
-		searcher.setSimilarity(similarity);
-	}
-
-	public void test() throws IOException {
-		PayloadNearQuery query;
-		TopDocs hits;
+  private PayloadNearQuery newPhraseQuery (String fieldName, String phrase, boolean inOrder) {
+    int n;
+    String[] words = phrase.split("[\\s]+");
+    SpanQuery clauses[] = new SpanQuery[words.length];
+    for (int i=0;i<clauses.length;i++) {
+      clauses[i] = new PayloadTermQuery(new Term(fieldName, words[i]), new AveragePayloadFunction());
 
+    } 
+    return new PayloadNearQuery(clauses, 0, inOrder);
+  }
+
+  protected void setUp() throws Exception {
+    super.setUp();
+    RAMDirectory directory = new RAMDirectory();
+    PayloadAnalyzer analyzer = new PayloadAnalyzer();
+    IndexWriter writer
+      = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setSimilarity(similarity);
+    //writer.infoStream = System.out;
+    for (int i = 0; i < 1000; i++) {
+      Document doc = new Document();
+      doc.add(new Field("field", English.intToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
+      String txt = English.intToEnglish(i) +' '+English.intToEnglish(i+1);
+      doc.add(new Field("field2",  txt, Field.Store.YES, Field.Index.ANALYZED));
+      writer.addDocument(doc);
+    }
+    writer.optimize();
+    writer.close();
+
+    searcher = new IndexSearcher(directory, true);
+    searcher.setSimilarity(similarity);
+  }
 
-		query = newPhraseQuery("field", "twenty two", true);
-		QueryUtils.check(query);
+  public void test() throws IOException {
+    PayloadNearQuery query;
+    TopDocs hits;
+
+    query = newPhraseQuery("field", "twenty two", true);
+    QueryUtils.check(query);
 		
-		// all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
-		// and all the similarity factors are set to 1
-		hits = searcher.search(query, null, 100);
-		assertTrue("hits is null and it shouldn't be", hits != null);
-		assertTrue("should be 10 hits", hits.totalHits == 10);
-		for (int j = 0; j < hits.scoreDocs.length; j++) {
-			ScoreDoc doc = hits.scoreDocs[j];
-			assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);
-		}
-		for (int i=1;i<10;i++) {
-			query = newPhraseQuery("field", English.intToEnglish(i)+" hundred", true);
-			// all should have score = 3 because adjacent terms have payloads of 2,4
-			// and all the similarity factors are set to 1
-			hits = searcher.search(query, null, 100);
-			assertTrue("hits is null and it shouldn't be", hits != null);
-			assertTrue("should be 100 hits", hits.totalHits == 100);
-			for (int j = 0; j < hits.scoreDocs.length; j++) {
-				ScoreDoc doc = hits.scoreDocs[j];
-//				System.out.println("Doc: " + doc.toString());
-//				System.out.println("Explain: " + searcher.explain(query, doc.doc));
-				assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);
-			}
-		}
-	}
-
-	public void testLongerSpan() throws IOException {
-		PayloadNearQuery query;
-		TopDocs hits;
-		query = newPhraseQuery("field", "nine hundred ninety nine", true);
-		hits = searcher.search(query, null, 100);
-		ScoreDoc doc = hits.scoreDocs[0];
-//		System.out.println("Doc: " + doc.toString());
-//		System.out.println("Explain: " + searcher.explain(query, doc.doc));
-		assertTrue("hits is null and it shouldn't be", hits != null);
-		assertTrue("there should only be one hit", hits.totalHits == 1);
-		// should have score = 3 because adjacent terms have payloads of 2,4
-		assertTrue(doc.score + " does not equal: " + 3, doc.score == 3); 
-	}
-
-	public void testComplexNested() throws IOException {
-		PayloadNearQuery query;
-		TopDocs hits;
-
-		// combine ordered and unordered spans with some nesting to make sure all payloads are counted
-
-		SpanQuery q1 = newPhraseQuery("field", "nine hundred", true);
-		SpanQuery q2 = newPhraseQuery("field", "ninety nine", true);
-		SpanQuery q3 = newPhraseQuery("field", "nine ninety", false);
-		SpanQuery q4 = newPhraseQuery("field", "hundred nine", false);
-		SpanQuery[]clauses = new SpanQuery[] {new PayloadNearQuery(new SpanQuery[] {q1,q2}, 0, true), new PayloadNearQuery(new SpanQuery[] {q3,q4}, 0, false)};
-		query = new PayloadNearQuery(clauses, 0, false);
-		hits = searcher.search(query, null, 100);
-		assertTrue("hits is null and it shouldn't be", hits != null);
-		// should be only 1 hit - doc 999
-		assertTrue("should only be one hit", hits.scoreDocs.length == 1);
-		// the score should be 3 - the average of all the underlying payloads
-		ScoreDoc doc = hits.scoreDocs[0];
-//		System.out.println("Doc: " + doc.toString());
-//		System.out.println("Explain: " + searcher.explain(query, doc.doc));
-		assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);  
-	}
-	// must be static for weight serialization tests 
-	static class BoostingSimilarity extends DefaultSimilarity {
+    // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
+    // and all the similarity factors are set to 1
+    hits = searcher.search(query, null, 100);
+    assertTrue("hits is null and it shouldn't be", hits != null);
+    assertTrue("should be 10 hits", hits.totalHits == 10);
+    for (int j = 0; j < hits.scoreDocs.length; j++) {
+      ScoreDoc doc = hits.scoreDocs[j];
+      assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);
+    }
+    for (int i=1;i<10;i++) {
+      query = newPhraseQuery("field", English.intToEnglish(i)+" hundred", true);
+      // all should have score = 3 because adjacent terms have payloads of 2,4
+      // and all the similarity factors are set to 1
+      hits = searcher.search(query, null, 100);
+      assertTrue("hits is null and it shouldn't be", hits != null);
+      assertTrue("should be 100 hits", hits.totalHits == 100);
+      for (int j = 0; j < hits.scoreDocs.length; j++) {
+        ScoreDoc doc = hits.scoreDocs[j];
+        //				System.out.println("Doc: " + doc.toString());
+        //				System.out.println("Explain: " + searcher.explain(query, doc.doc));
+        assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);
+      }
+    }
+  }
 
-// TODO: Remove warning after API has been finalized
+
+  public void testPayloadNear() throws IOException {
+    SpanNearQuery q1, q2;
+    PayloadNearQuery query;
+    TopDocs hits;
+    //SpanNearQuery(clauses, 10000, false)
+    q1 = spanNearQuery("field2", "twenty two");
+    q2 = spanNearQuery("field2", "twenty three");
+    SpanQuery[] clauses = new SpanQuery[2];
+    clauses[0] = q1;
+    clauses[1] = q2;
+    query = new PayloadNearQuery(clauses, 10, false); 
+    //System.out.println(query.toString());
+    assertEquals(12, searcher.search(query, null, 100).totalHits);
+    /*
+    System.out.println(hits.totalHits);
+    for (int j = 0; j < hits.scoreDocs.length; j++) {
+      ScoreDoc doc = hits.scoreDocs[j];
+      System.out.println("doc: "+doc.doc+", score: "+doc.score);
+    }
+    */
+  }
+
+  private SpanNearQuery spanNearQuery(String fieldName, String words) {
+    String[] wordList = words.split("[\\s]+");
+    SpanQuery clauses[] = new SpanQuery[wordList.length];
+    for (int i=0;i<clauses.length;i++) {
+      clauses[i] = new PayloadTermQuery(new Term(fieldName, wordList[i]), new AveragePayloadFunction());
 
+    } 
+    return new SpanNearQuery(clauses, 10000, false);
+  }
+
+  public void testLongerSpan() throws IOException {
+    PayloadNearQuery query;
+    TopDocs hits;
+    query = newPhraseQuery("field", "nine hundred ninety nine", true);
+    hits = searcher.search(query, null, 100);
+    ScoreDoc doc = hits.scoreDocs[0];
+    //		System.out.println("Doc: " + doc.toString());
+    //		System.out.println("Explain: " + searcher.explain(query, doc.doc));
+    assertTrue("hits is null and it shouldn't be", hits != null);
+    assertTrue("there should only be one hit", hits.totalHits == 1);
+    // should have score = 3 because adjacent terms have payloads of 2,4
+    assertTrue(doc.score + " does not equal: " + 3, doc.score == 3); 
+  }
+
+  public void testComplexNested() throws IOException {
+    PayloadNearQuery query;
+    TopDocs hits;
+
+    // combine ordered and unordered spans with some nesting to make sure all payloads are counted
+
+    SpanQuery q1 = newPhraseQuery("field", "nine hundred", true);
+    SpanQuery q2 = newPhraseQuery("field", "ninety nine", true);
+    SpanQuery q3 = newPhraseQuery("field", "nine ninety", false);
+    SpanQuery q4 = newPhraseQuery("field", "hundred nine", false);
+    SpanQuery[]clauses = new SpanQuery[] {new PayloadNearQuery(new SpanQuery[] {q1,q2}, 0, true), new PayloadNearQuery(new SpanQuery[] {q3,q4}, 0, false)};
+    query = new PayloadNearQuery(clauses, 0, false);
+    hits = searcher.search(query, null, 100);
+    assertTrue("hits is null and it shouldn't be", hits != null);
+    // should be only 1 hit - doc 999
+    assertTrue("should only be one hit", hits.scoreDocs.length == 1);
+    // the score should be 3 - the average of all the underlying payloads
+    ScoreDoc doc = hits.scoreDocs[0];
+    //		System.out.println("Doc: " + doc.toString());
+    //		System.out.println("Explain: " + searcher.explain(query, doc.doc));
+    assertTrue(doc.score + " does not equal: " + 3, doc.score == 3);  
+  }
+
+  // must be static for weight serialization tests 
+  static class BoostingSimilarity extends DefaultSimilarity {
+
+    // TODO: Remove warning after API has been finalized
    public float scorePayload(int docId, String fieldName, int start, int end, byte[] payload, int offset, int length) {
       //we know it is size 4 here, so ignore the offset/length
       return payload[0];
     }
-		//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-		//Make everything else 1 so we see the effect of the payload
-		//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-		public float lengthNorm(String fieldName, int numTerms) {
-			return 1;
-		}
-
-		public float queryNorm(float sumOfSquaredWeights) {
-			return 1;
-		}
-
-		public float sloppyFreq(int distance) {
-			return 1;
-		}
-
-		public float coord(int overlap, int maxOverlap) {
-			return 1;
-		}
-		public float tf(float freq) {
-			return 1;
-		}
-		// idf used for phrase queries
-		public float idf(Collection terms, Searcher searcher) {
-			return 1;
-		}
-	}
+    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    //Make everything else 1 so we see the effect of the payload
+    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+    public float lengthNorm(String fieldName, int numTerms) {
+      return 1;
+    }
+
+    public float queryNorm(float sumOfSquaredWeights) {
+      return 1;
+    }
+
+    public float sloppyFreq(int distance) {
+      return 1;
+    }
+
+    public float coord(int overlap, int maxOverlap) {
+      return 1;
+    }
+    public float tf(float freq) {
+      return 1;
+    }
+    // idf used for phrase queries
+    public float idf(Collection terms, Searcher searcher) {
+      return 1;
+    }
+  }
 }

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/store/MockRAMDirectory.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/store/MockRAMDirectory.java?rev=827772&r1=827771&r2=827772&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/store/MockRAMDirectory.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/store/MockRAMDirectory.java Tue Oct 20 19:58:18 2009
@@ -43,8 +43,8 @@
   Random randomState;
   boolean noDeleteOpenFile = true;
   boolean preventDoubleWrite = true;
-  private Set unSyncedFiles;
-  private Set createdFiles;
+  private Set<String> unSyncedFiles;
+  private Set<String> createdFiles;
   volatile boolean crashed;
 
   // NOTE: we cannot initialize the Map here due to the
@@ -90,12 +90,12 @@
   public synchronized void crash() throws IOException {
     crashed = true;
     openFiles = new HashMap();
-    Iterator it = unSyncedFiles.iterator();
+    Iterator<String> it = unSyncedFiles.iterator();
     unSyncedFiles = new HashSet();
     int count = 0;
     while(it.hasNext()) {
-      String name = (String) it.next();
-      RAMFile file = (RAMFile) fileMap.get(name);
+      String name = it.next();
+      RAMFile file = fileMap.get(name);
       if (count % 3 == 0) {
         deleteFile(name, true);
       } else if (count % 3 == 1) {
@@ -206,7 +206,7 @@
       throw new IOException("cannot createOutput after crash");
     unSyncedFiles.add(name);
     createdFiles.add(name);
-    RAMFile existing = (RAMFile)fileMap.get(name);
+    RAMFile existing = fileMap.get(name);
     // Enforce write once:
     if (existing!=null && !name.equals("segments.gen") && preventDoubleWrite)
       throw new IOException("file " + name + " already exists");
@@ -232,7 +232,7 @@
   }
 
   public synchronized IndexInput openInput(String name) throws IOException {
-    RAMFile file = (RAMFile)fileMap.get(name);
+    RAMFile file = fileMap.get(name);
     if (file == null)
       throw new FileNotFoundException(name);
     else {
@@ -245,9 +245,9 @@
   /** Provided for testing purposes.  Use sizeInBytes() instead. */
   public synchronized final long getRecomputedSizeInBytes() {
     long size = 0;
-    Iterator it = fileMap.values().iterator();
-    while (it.hasNext())
-      size += ((RAMFile) it.next()).getSizeInBytes();
+    for(final RAMFile file: fileMap.values()) {
+      size += file.getSizeInBytes();
+    }
     return size;
   }
 
@@ -259,9 +259,8 @@
 
   public final synchronized long getRecomputedActualSizeInBytes() {
     long size = 0;
-    Iterator it = fileMap.values().iterator();
-    while (it.hasNext())
-      size += ((RAMFile) it.next()).length;
+    for (final RAMFile file : fileMap.values())
+      size += file.length;
     return size;
   }
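
The remaining MockRAMDirectory edits swap raw Iterator loops and explicit (RAMFile) casts for typed collections and the enhanced for statement; once the map is declared with a generic element type the casts become unnecessary. A tiny before/after sketch with placeholder types (not the actual RAMFile/fileMap declarations):

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class GenericsLoopExample {
  public static void main(String[] args) {
    Map<String, byte[]> fileMap = new HashMap<String, byte[]>();
    fileMap.put("a", new byte[10]);
    fileMap.put("b", new byte[20]);

    // old style: raw Iterator plus a cast on every element
    long size = 0;
    Iterator it = fileMap.values().iterator();
    while (it.hasNext())
      size += ((byte[]) it.next()).length;

    // new style: the element type carries through, no cast needed
    long size2 = 0;
    for (final byte[] file : fileMap.values())
      size2 += file.length;

    System.out.println(size + " == " + size2); // prints "30 == 30"
  }
}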
 

Added: lucene/java/branches/flex_1458/test/file
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/test/file?rev=827772&view=auto
==============================================================================
    (empty)


