lucene-commits mailing list archives

From uschind...@apache.org
Subject svn commit: r1592929 [2/2] - in /lucene/dev/branches/branch_4x: ./ lucene/ lucene/analysis/ lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/ lucene/analysis/common/src/java/o...
Date Wed, 07 May 2014 01:02:37 GMT
Modified: lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java (original)
+++ lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java Wed May  7 01:02:35 2014
@@ -17,11 +17,11 @@ package org.apache.lucene.analysis.uima;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
 import org.apache.uima.cas.CASException;
 import org.apache.uima.cas.FeaturePath;
@@ -54,7 +54,7 @@ public final class UIMATypeAwareAnnotati
   private int finalOffset = 0;
 
   public UIMATypeAwareAnnotationsTokenizer(String descriptorPath, String tokenType, String typeAttributeFeaturePath, Map<String, Object> configurationParameters, Reader input) {
-    this(descriptorPath, tokenType, typeAttributeFeaturePath, configurationParameters, Token.TOKEN_ATTRIBUTE_FACTORY, input);
+    this(descriptorPath, tokenType, typeAttributeFeaturePath, configurationParameters, DEFAULT_TOKEN_ATTRIBUTE_FACTORY, input);
   }
 
   public UIMATypeAwareAnnotationsTokenizer(String descriptorPath, String tokenType, String typeAttributeFeaturePath, 

Modified: lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizerFactory.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizerFactory.java (original)
+++ lucene/dev/branches/branch_4x/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizerFactory.java Wed May  7 01:02:35 2014
@@ -18,7 +18,7 @@ package org.apache.lucene.analysis.uima;
  */
 
 import org.apache.lucene.analysis.util.TokenizerFactory;
-import org.apache.lucene.util.AttributeSource.AttributeFactory;
+import org.apache.lucene.util.AttributeFactory;
 
 import java.io.Reader;
 import java.util.HashMap;

Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/NumericTokenStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/NumericTokenStream.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/NumericTokenStream.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/NumericTokenStream.java Wed May  7 01:02:35 2014
@@ -28,6 +28,7 @@ import org.apache.lucene.document.LongFi
 import org.apache.lucene.search.NumericRangeFilter; // for javadocs
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 import org.apache.lucene.util.BytesRef;
@@ -233,7 +234,7 @@ public final class NumericTokenStream ex
   /**
    * Expert: Creates a token stream for numeric values with the specified
    * <code>precisionStep</code> using the given
-   * {@link org.apache.lucene.util.AttributeSource.AttributeFactory}.
+   * {@link org.apache.lucene.util.AttributeFactory}.
   * The stream is not yet initialized; before using it,
   * set a value using the various set<em>???</em>Value() methods.
    */

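For context on the javadoc change above: the expert constructor now takes the top-level org.apache.lucene.util.AttributeFactory instead of the nested AttributeSource.AttributeFactory. A minimal usage sketch, assuming the branch_4x API after this commit (the class name NumericTokenStreamSketch and the precision step of 4 are illustrative only):

import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.util.AttributeFactory;

public class NumericTokenStreamSketch {
  public static void main(String[] args) {
    // Expert API: create the stream with an explicit AttributeFactory, then
    // initialize it with a value before handing it to the indexer.
    NumericTokenStream stream =
        new NumericTokenStream(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, 4);
    stream.setLongValue(1234L);
  }
}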
Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Token.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Token.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Token.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Token.java Wed May  7 01:02:35 2014
@@ -17,16 +17,12 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
-import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
+import org.apache.lucene.analysis.tokenattributes.PackedTokenAttributeImpl;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
-import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.apache.lucene.index.DocsAndPositionsEnum; // for javadoc
 import org.apache.lucene.util.Attribute;
-import org.apache.lucene.util.AttributeSource;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 import org.apache.lucene.util.BytesRef;
@@ -57,54 +53,7 @@ import org.apache.lucene.util.BytesRef;
   Even though it is not necessary to use Token anymore, with the new TokenStream API it can
   be used as convenience class that implements all {@link Attribute}s, which is especially useful
   to easily switch from the old to the new TokenStream API.
-  
-  <br><br>
-  
-  <p>Tokenizers and TokenFilters should try to re-use a Token
-  instance when possible for best performance, by
-  implementing the {@link TokenStream#incrementToken()} API.
-  Failing that, to create a new Token you should first use
-  one of the constructors that starts with null text.  To load
-  the token from a char[] use {@link #copyBuffer(char[], int, int)}.
-  To load from a String use {@link #setEmpty} followed by {@link #append(CharSequence)} or {@link #append(CharSequence, int, int)}.
-  Alternatively you can get the Token's termBuffer by calling either {@link #buffer()},
-  if you know that your text is shorter than the capacity of the termBuffer
-  or {@link #resizeBuffer(int)}, if there is any possibility
-  that you may need to grow the buffer. Fill in the characters of your term into this
-  buffer, with {@link String#getChars(int, int, char[], int)} if loading from a string,
-  or with {@link System#arraycopy(Object, int, Object, int, int)}, and finally call {@link #setLength(int)} to
-  set the length of the term text.  See <a target="_top"
-  href="https://issues.apache.org/jira/browse/LUCENE-969">LUCENE-969</a>
-  for details.</p>
-  <p>Typical Token reuse patterns:
-  <ul>
-  <li> Copying text from a string (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
-  <pre class="prettyprint">
-    return reusableToken.reinit(string, startOffset, endOffset[, type]);
-  </pre>
-  </li>
-  <li> Copying some text from a string (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
-  <pre class="prettyprint">
-    return reusableToken.reinit(string, 0, string.length(), startOffset, endOffset[, type]);
-  </pre>
-  </li>
-  </li>
-  <li> Copying text from char[] buffer (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
-  <pre class="prettyprint">
-    return reusableToken.reinit(buffer, 0, buffer.length, startOffset, endOffset[, type]);
-  </pre>
-  </li>
-  <li> Copying some text from a char[] buffer (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
-  <pre class="prettyprint">
-    return reusableToken.reinit(buffer, start, end - start, startOffset, endOffset[, type]);
-  </pre>
-  </li>
-  <li> Copying from one one Token to another (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
-  <pre class="prettyprint">
-    return reusableToken.reinit(source.buffer(), 0, source.length(), source.startOffset(), source.endOffset()[, source.type()]);
-  </pre>
-  </li>
-  </ul>
+ 
   A few things to note:
   <ul>
   <li>clear() initializes all of the fields to default values. This was changed in contrast to Lucene 2.4, but should affect no one.</li>
@@ -118,17 +67,13 @@ import org.apache.lucene.util.BytesRef;
   {@link CharSequence} interface introduced by the interface {@link org.apache.lucene.analysis.tokenattributes.CharTermAttribute}.
   This method now only prints the term text, no additional information anymore.
   </p>
+  @deprecated This class is outdated and no longer used since Lucene 2.9. Nuke it finally!
 */
-public class Token extends CharTermAttributeImpl 
-                   implements TypeAttribute, PositionIncrementAttribute,
-                              FlagsAttribute, OffsetAttribute, PayloadAttribute, PositionLengthAttribute {
+@Deprecated
+public class Token extends PackedTokenAttributeImpl implements FlagsAttribute, PayloadAttribute {
 
-  private int startOffset,endOffset;
-  private String type = DEFAULT_TYPE;
   private int flags;
   private BytesRef payload;
-  private int positionIncrement = 1;
-  private int positionLength = 1;
 
   /** Constructs a Token with null text. */
   public Token() {
@@ -139,9 +84,7 @@ public class Token extends CharTermAttri
    *  @param start start offset in the source text
    *  @param end end offset in the source text */
   public Token(int start, int end) {
-    checkOffsets(start, end);
-    startOffset = start;
-    endOffset = end;
+    setOffset(start, end);
   }
 
   /** Constructs a Token with null text and start & end
@@ -150,10 +93,8 @@ public class Token extends CharTermAttri
    *  @param end end offset in the source text
    *  @param typ the lexical type of this Token */
   public Token(int start, int end, String typ) {
-    checkOffsets(start, end);
-    startOffset = start;
-    endOffset = end;
-    type = typ;
+    setOffset(start, end);
+    setType(typ);
   }
 
   /**
@@ -164,10 +105,8 @@ public class Token extends CharTermAttri
    *  @param flags The bits to set for this token
    */
   public Token(int start, int end, int flags) {
-    checkOffsets(start, end);
-    startOffset = start;
-    endOffset = end;
-    this.flags = flags;
+    setOffset(start, end);
+    setFlags(flags);
   }
 
   /** Constructs a Token with the given term text, and start
@@ -179,11 +118,9 @@ public class Token extends CharTermAttri
    *  @param start start offset in the source text
    *  @param end end offset in the source text
    */
-  public Token(String text, int start, int end) {
-    checkOffsets(start, end);
+  public Token(CharSequence text, int start, int end) {
     append(text);
-    startOffset = start;
-    endOffset = end;
+    setOffset(start, end);
   }
 
   /** Constructs a Token with the given text, start and end
@@ -196,11 +133,9 @@ public class Token extends CharTermAttri
    *  @param typ token type
    */
   public Token(String text, int start, int end, String typ) {
-    checkOffsets(start, end);
     append(text);
-    startOffset = start;
-    endOffset = end;
-    type = typ;
+    setOffset(start, end);
+    setType(typ);
   }
 
   /**
@@ -214,11 +149,9 @@ public class Token extends CharTermAttri
    * @param flags token type bits
    */
   public Token(String text, int start, int end, int flags) {
-    checkOffsets(start, end);
     append(text);
-    startOffset = start;
-    endOffset = end;
-    this.flags = flags;
+    setOffset(start, end);
+    setFlags(flags);
   }
 
   /**
@@ -232,96 +165,8 @@ public class Token extends CharTermAttri
    * @param end end offset in the source text
    */
   public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end) {
-    checkOffsets(start, end);
     copyBuffer(startTermBuffer, termBufferOffset, termBufferLength);
-    startOffset = start;
-    endOffset = end;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see PositionIncrementAttribute
-   */
-  @Override
-  public void setPositionIncrement(int positionIncrement) {
-    if (positionIncrement < 0)
-      throw new IllegalArgumentException
-        ("Increment must be zero or greater: " + positionIncrement);
-    this.positionIncrement = positionIncrement;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see PositionIncrementAttribute
-   */
-  @Override
-  public int getPositionIncrement() {
-    return positionIncrement;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see PositionLengthAttribute
-   */
-  @Override
-  public void setPositionLength(int positionLength) {
-    this.positionLength = positionLength;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see PositionLengthAttribute
-   */
-  @Override
-  public int getPositionLength() {
-    return positionLength;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see OffsetAttribute
-   */
-  @Override
-  public final int startOffset() {
-    return startOffset;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see OffsetAttribute
-   */
-  @Override
-  public final int endOffset() {
-    return endOffset;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see OffsetAttribute
-   */
-  @Override
-  public void setOffset(int startOffset, int endOffset) {
-    checkOffsets(startOffset, endOffset);
-    this.startOffset = startOffset;
-    this.endOffset = endOffset;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see TypeAttribute
-   */
-  @Override
-  public final String type() {
-    return type;
-  }
-
-  /**
-   * {@inheritDoc}
-   * @see TypeAttribute
-   */
-  @Override
-  public final void setType(String type) {
-    this.type = type;
+    setOffset(start, end);
   }
 
   /**
@@ -366,11 +211,8 @@ public class Token extends CharTermAttri
   @Override
   public void clear() {
     super.clear();
-    payload = null;
-    positionIncrement = positionLength = 1;
     flags = 0;
-    startOffset = endOffset = 0;
-    type = DEFAULT_TYPE;
+    payload = null;
   }
 
   @Override
@@ -383,22 +225,6 @@ public class Token extends CharTermAttri
     return t;
   }
 
-  /** Makes a clone, but replaces the term buffer &
-   * start/end offset in the process.  This is more
-   * efficient than doing a full clone (and then calling
-   * {@link #copyBuffer}) because it saves a wasted copy of the old
-   * termBuffer. */
-  public Token clone(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
-    final Token t = new Token(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset);
-    t.positionIncrement = positionIncrement;
-    t.positionLength = positionLength;
-    t.flags = flags;
-    t.type = type;
-    if (payload != null)
-      t.payload = payload.clone();
-    return t;
-  }
-
   @Override
   public boolean equals(Object obj) {
     if (obj == this)
@@ -406,14 +232,10 @@ public class Token extends CharTermAttri
 
     if (obj instanceof Token) {
       final Token other = (Token) obj;
-      return (startOffset == other.startOffset &&
-          endOffset == other.endOffset && 
-          flags == other.flags &&
-          positionIncrement == other.positionIncrement &&
-          positionLength == other.positionLength &&
-          (type == null ? other.type == null : type.equals(other.type)) &&
-          (payload == null ? other.payload == null : payload.equals(other.payload)) &&
-          super.equals(obj)
+      return (
+        flags == other.flags &&
+        (payload == null ? other.payload == null : payload.equals(other.payload)) &&
+        super.equals(obj)
       );
     } else
       return false;
@@ -422,26 +244,12 @@ public class Token extends CharTermAttri
   @Override
   public int hashCode() {
     int code = super.hashCode();
-    code = code * 31 + startOffset;
-    code = code * 31 + endOffset;
     code = code * 31 + flags;
-    code = code * 31 + positionIncrement;
-    code = code * 31 + positionLength;
-    if (type != null)
-      code = code * 31 + type.hashCode();
-    if (payload != null)
+    if (payload != null) {
       code = code * 31 + payload.hashCode();
+    }
     return code;
   }
-      
-  // like clear() but doesn't clear termBuffer/text
-  private void clearNoTermBuffer() {
-    payload = null;
-    positionIncrement = positionLength = 1;
-    flags = 0;
-    startOffset = endOffset = 0;
-    type = DEFAULT_TYPE;
-  }
 
   /** Shorthand for calling {@link #clear},
    *  {@link #copyBuffer(char[], int, int)},
@@ -449,14 +257,10 @@ public class Token extends CharTermAttri
    *  {@link #setType}
    *  @return this Token instance */
   public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType) {
-    checkOffsets(newStartOffset, newEndOffset);
-    clearNoTermBuffer();
+    clear();
     copyBuffer(newTermBuffer, newTermOffset, newTermLength);
-    payload = null;
-    positionIncrement = positionLength = 1;
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = newType;
+    setOffset(newStartOffset, newEndOffset);
+    setType(newType);
     return this;
   }
 
@@ -466,12 +270,9 @@ public class Token extends CharTermAttri
    *  {@link #setType} on Token.DEFAULT_TYPE
    *  @return this Token instance */
   public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
-    checkOffsets(newStartOffset, newEndOffset);
-    clearNoTermBuffer();
+    clear();
     copyBuffer(newTermBuffer, newTermOffset, newTermLength);
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = DEFAULT_TYPE;
+    setOffset(newStartOffset, newEndOffset);
     return this;
   }
 
@@ -481,12 +282,10 @@ public class Token extends CharTermAttri
    *  {@link #setType}
    *  @return this Token instance */
   public Token reinit(String newTerm, int newStartOffset, int newEndOffset, String newType) {
-    checkOffsets(newStartOffset, newEndOffset);
     clear();
     append(newTerm);
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = newType;
+    setOffset(newStartOffset, newEndOffset);
+    setType(newType);
     return this;
   }
 
@@ -496,12 +295,10 @@ public class Token extends CharTermAttri
    *  {@link #setType}
    *  @return this Token instance */
   public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType) {
-    checkOffsets(newStartOffset, newEndOffset);
     clear();
     append(newTerm, newTermOffset, newTermOffset + newTermLength);
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = newType;
+    setOffset(newStartOffset, newEndOffset);
+    setType(newType);
     return this;
   }
 
@@ -511,12 +308,9 @@ public class Token extends CharTermAttri
    *  {@link #setType} on Token.DEFAULT_TYPE
    *  @return this Token instance */
   public Token reinit(String newTerm, int newStartOffset, int newEndOffset) {
-    checkOffsets(newStartOffset, newEndOffset);
     clear();
     append(newTerm);
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = DEFAULT_TYPE;
+    setOffset(newStartOffset, newEndOffset);
     return this;
   }
 
@@ -526,12 +320,9 @@ public class Token extends CharTermAttri
    *  {@link #setType} on Token.DEFAULT_TYPE
    *  @return this Token instance */
   public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
-    checkOffsets(newStartOffset, newEndOffset);
     clear();
     append(newTerm, newTermOffset, newTermOffset + newTermLength);
-    startOffset = newStartOffset;
-    endOffset = newEndOffset;
-    type = DEFAULT_TYPE;
+    setOffset(newStartOffset, newEndOffset);
     return this;
   }
 
@@ -540,30 +331,24 @@ public class Token extends CharTermAttri
    * @param prototype source Token to copy fields from
    */
   public void reinit(Token prototype) {
-    copyBuffer(prototype.buffer(), 0, prototype.length());
-    positionIncrement = prototype.positionIncrement;
-    positionLength = prototype.positionLength;
-    flags = prototype.flags;
-    startOffset = prototype.startOffset;
-    endOffset = prototype.endOffset;
-    type = prototype.type;
-    payload =  prototype.payload;
+    // this is a bad hack to emulate no cloning of payload!
+    prototype.copyToWithoutPayloadClone(this);
   }
 
+  private void copyToWithoutPayloadClone(AttributeImpl target) {
+    super.copyTo(target);
+    ((FlagsAttribute) target).setFlags(flags);
+    ((PayloadAttribute) target).setPayload(payload);
+  }
+  
   /**
    * Copy the prototype token's fields into this one, with a different term. Note: Payloads are shared.
    * @param prototype existing Token
    * @param newTerm new term text
    */
   public void reinit(Token prototype, String newTerm) {
+    reinit(prototype);
     setEmpty().append(newTerm);
-    positionIncrement = prototype.positionIncrement;
-    positionLength = prototype.positionLength;
-    flags = prototype.flags;
-    startOffset = prototype.startOffset;
-    endOffset = prototype.endOffset;
-    type = prototype.type;
-    payload =  prototype.payload;
   }
 
   /**
@@ -574,53 +359,22 @@ public class Token extends CharTermAttri
    * @param length number of valid characters in the buffer
    */
   public void reinit(Token prototype, char[] newTermBuffer, int offset, int length) {
+    reinit(prototype);
     copyBuffer(newTermBuffer, offset, length);
-    positionIncrement = prototype.positionIncrement;
-    positionLength = prototype.positionLength;
-    flags = prototype.flags;
-    startOffset = prototype.startOffset;
-    endOffset = prototype.endOffset;
-    type = prototype.type;
-    payload =  prototype.payload;
   }
 
   @Override
   public void copyTo(AttributeImpl target) {
-    if (target instanceof Token) {
-      final Token to = (Token) target;
-      to.reinit(this);
-      // reinit shares the payload, so clone it:
-      if (payload !=null) {
-        to.payload = payload.clone();
-      }
-    } else {
-      super.copyTo(target);
-      ((OffsetAttribute) target).setOffset(startOffset, endOffset);
-      ((PositionIncrementAttribute) target).setPositionIncrement(positionIncrement);
-      ((PositionLengthAttribute) target).setPositionLength(positionLength);
-      ((PayloadAttribute) target).setPayload((payload == null) ? null : payload.clone());
-      ((FlagsAttribute) target).setFlags(flags);
-      ((TypeAttribute) target).setType(type);
-    }
+    super.copyTo(target);
+    ((FlagsAttribute) target).setFlags(flags);
+    ((PayloadAttribute) target).setPayload((payload == null) ? null : payload.clone());
   }
 
   @Override
   public void reflectWith(AttributeReflector reflector) {
     super.reflectWith(reflector);
-    reflector.reflect(OffsetAttribute.class, "startOffset", startOffset);
-    reflector.reflect(OffsetAttribute.class, "endOffset", endOffset);
-    reflector.reflect(PositionIncrementAttribute.class, "positionIncrement", positionIncrement);
-    reflector.reflect(PositionLengthAttribute.class, "positionLength", positionLength);
-    reflector.reflect(PayloadAttribute.class, "payload", payload);
     reflector.reflect(FlagsAttribute.class, "flags", flags);
-    reflector.reflect(TypeAttribute.class, "type", type);
-  }
-  
-  private void checkOffsets(int startOffset, int endOffset) {
-    if (startOffset < 0 || endOffset < startOffset) {
-      throw new IllegalArgumentException("startOffset must be non-negative, and endOffset must be >= startOffset, "
-          + "startOffset=" + startOffset + ",endOffset=" + endOffset);
-    }
+    reflector.reflect(PayloadAttribute.class, "payload", payload);
   }
 
   /** Convenience factory that returns <code>Token</code> as implementation for the basic
@@ -628,43 +382,6 @@ public class Token extends CharTermAttri
    * attributes.
    * @since 3.0
    */
-  public static final AttributeSource.AttributeFactory TOKEN_ATTRIBUTE_FACTORY =
-    new TokenAttributeFactory(AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
-  
-  /** <b>Expert:</b> Creates a TokenAttributeFactory returning {@link Token} as instance for the basic attributes
-   * and for all other attributes calls the given delegate factory.
-   * @since 3.0
-   */
-  public static final class TokenAttributeFactory extends AttributeSource.AttributeFactory {
-    
-    private final AttributeSource.AttributeFactory delegate;
-    
-    /** <b>Expert</b>: Creates an AttributeFactory returning {@link Token} as instance for the basic attributes
-     * and for all other attributes calls the given delegate factory. */
-    public TokenAttributeFactory(AttributeSource.AttributeFactory delegate) {
-      this.delegate = delegate;
-    }
-  
-    @Override
-    public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
-      return attClass.isAssignableFrom(Token.class)
-        ? new Token() : delegate.createAttributeInstance(attClass);
-    }
-    
-    @Override
-    public boolean equals(Object other) {
-      if (this == other) return true;
-      if (other instanceof TokenAttributeFactory) {
-        final TokenAttributeFactory af = (TokenAttributeFactory) other;
-        return this.delegate.equals(af.delegate);
-      }
-      return false;
-    }
-    
-    @Override
-    public int hashCode() {
-      return delegate.hashCode() ^ 0x0a45aa31;
-    }
-  }
-
+  public static final AttributeFactory TOKEN_ATTRIBUTE_FACTORY =
+      AttributeFactory.getStaticImplementation(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, Token.class);
 }

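The deprecated Token.TOKEN_ATTRIBUTE_FACTORY shown above is now built with AttributeFactory.getStaticImplementation, which returns Token for every basic token attribute and delegates everything else. A sketch of what that means for callers, assuming only the API visible in this diff (TokenFactorySketch is an illustrative name):

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.AttributeSource;

public class TokenFactorySketch {
  public static void main(String[] args) {
    AttributeFactory tokenFactory = AttributeFactory.getStaticImplementation(
        AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, Token.class);
    AttributeSource source = new AttributeSource(tokenFactory);
    // The basic token attributes are all served by one (deprecated) Token
    // instance; attributes Token does not implement fall back to the delegate.
    CharTermAttribute term = source.addAttribute(CharTermAttribute.class);
    System.out.println(term instanceof Token); // expected: true
  }
}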
Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java Wed May  7 01:02:35 2014
@@ -21,11 +21,13 @@ import java.io.IOException;
 import java.io.Closeable;
 import java.lang.reflect.Modifier;
 
+import org.apache.lucene.analysis.tokenattributes.PackedTokenAttributeImpl;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeSource;
 
@@ -84,12 +86,16 @@ import org.apache.lucene.util.AttributeS
  * assertions are enabled.
  */
 public abstract class TokenStream extends AttributeSource implements Closeable {
+  
+  /** Default {@link AttributeFactory} instance that should be used for TokenStreams. */
+  public static final AttributeFactory DEFAULT_TOKEN_ATTRIBUTE_FACTORY =
+    AttributeFactory.getStaticImplementation(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, PackedTokenAttributeImpl.class);
 
   /**
    * A TokenStream using the default attribute factory.
    */
   protected TokenStream() {
-    super(Token.TOKEN_ATTRIBUTE_FACTORY);
+    super(DEFAULT_TOKEN_ATTRIBUTE_FACTORY);
     assert assertFinal();
   }
   

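With the constructor change above, every TokenStream created through the no-arg constructor now uses DEFAULT_TOKEN_ATTRIBUTE_FACTORY, so the basic token attributes are packed into a single PackedTokenAttributeImpl instance. A sketch under that assumption (the anonymous empty stream exists only to inspect the attribute wiring):

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PackedTokenAttributeImpl;

public class DefaultFactorySketch {
  public static void main(String[] args) {
    TokenStream ts = new TokenStream() {
      @Override
      public boolean incrementToken() {
        return false; // empty stream; we only look at the attributes
      }
    };
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    OffsetAttribute offset = ts.addAttribute(OffsetAttribute.class);
    // Both interfaces should be backed by the same packed implementation:
    System.out.println(term instanceof PackedTokenAttributeImpl); // true
    System.out.println((Object) term == (Object) offset);         // true
  }
}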
Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Tokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Tokenizer.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Tokenizer.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/Tokenizer.java Wed May  7 01:02:35 2014
@@ -17,6 +17,7 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeSource;
 
 import java.io.Reader;

Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/package.html
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/package.html?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/package.html (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/analysis/package.html Wed May  7 01:02:35 2014
@@ -783,7 +783,7 @@ Now we're going to implement our own cus
 </p>
 <p>
   This should be the usual behavior. However, there is also an expert-API that allows changing these naming conventions:
-  {@link org.apache.lucene.util.AttributeSource.AttributeFactory}. The factory accepts an Attribute interface as argument
+  {@link org.apache.lucene.util.AttributeFactory}. The factory accepts an Attribute interface as argument
   and returns an actual instance. You can implement your own factory if you need to change the default behavior.
 </p>
 <p>

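The package documentation above notes that you can implement your own factory to change the default behavior. A hypothetical example (MyAttributeFactory is an invented name, not part of this commit) that pins one attribute interface to a concrete implementation and delegates everything else:

import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.AttributeImpl;

public final class MyAttributeFactory extends AttributeFactory {
  @Override
  public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
    if (attClass == CharTermAttribute.class) {
      // pin the term attribute to a concrete implementation of our choice
      return new CharTermAttributeImpl();
    }
    // everything else follows the usual "<Interface>Impl" naming convention
    return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY.createAttributeInstance(attClass);
  }
}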
Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java Wed May  7 01:02:35 2014
@@ -19,8 +19,7 @@ package org.apache.lucene.util;
 
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
-import java.lang.ref.WeakReference;
-import java.util.LinkedList;
+import java.lang.ref.Reference;
 
 /**
  * Base class for Attributes that can be added to a 
@@ -91,12 +90,14 @@ public abstract class AttributeImpl impl
    */
   public void reflectWith(AttributeReflector reflector) {
     final Class<? extends AttributeImpl> clazz = this.getClass();
-    final LinkedList<WeakReference<Class<? extends Attribute>>> interfaces = AttributeSource.getAttributeInterfaces(clazz);
-    if (interfaces.size() != 1) {
+    final Reference<Class<? extends Attribute>>[] interfaces = AttributeSource.getAttributeInterfaces(clazz);
+    if (interfaces.length != 1) {
       throw new UnsupportedOperationException(clazz.getName() +
         " implements more than one Attribute interface, the default reflectWith() implementation cannot handle this.");
     }
-    final Class<? extends Attribute> interf = interfaces.getFirst().get();
+    final Class<? extends Attribute> interf = interfaces[0].get();
+    assert (interf != null) :
+      "We have a strong reference on the class holding the interfaces, so they should never get evicted";
     final Field[] fields = clazz.getDeclaredFields();
     try {
       for (int i = 0; i < fields.length; i++) {

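The guard above means the default reflectWith() only supports AttributeImpls that implement exactly one Attribute interface; anything else must override reflectWith() itself, as Token does earlier in this commit. A hypothetical single-interface attribute, with invented names, that can rely on the inherited implementation:

import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;

interface ColorAttribute extends Attribute {
  void setColor(int color);
  int getColor();
}

// Named "<Interface>Impl" so the default factory can find it; because it
// implements a single Attribute interface, the inherited reflectWith()
// reflects the "color" field under ColorAttribute automatically.
class ColorAttributeImpl extends AttributeImpl implements ColorAttribute {
  private int color;

  @Override public void setColor(int color) { this.color = color; }
  @Override public int getColor() { return color; }

  @Override public void clear() { color = 0; }

  @Override public void copyTo(AttributeImpl target) {
    ((ColorAttribute) target).setColor(color);
  }
}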
Modified: lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java Wed May  7 01:02:35 2014
@@ -17,12 +17,14 @@ package org.apache.lucene.util;
  * limitations under the License.
  */
 
+import java.lang.ref.Reference;
 import java.lang.ref.WeakReference;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -38,59 +40,15 @@ import org.apache.lucene.analysis.TokenS
  * it creates a new instance and returns it.
  */
 public class AttributeSource {
+  
   /**
-   * An AttributeFactory creates instances of {@link AttributeImpl}s.
+   * This is the default factory that creates {@link AttributeImpl}s using the
+   * class name of the supplied {@link Attribute} interface class by appending <code>Impl</code> to it.
+   * @deprecated use {@link AttributeFactory#DEFAULT_ATTRIBUTE_FACTORY}
    */
-  public static abstract class AttributeFactory {
-    /**
-     * returns an {@link AttributeImpl} for the supplied {@link Attribute} interface class.
-     */
-    public abstract AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass);
-    
-    /**
-     * This is the default factory that creates {@link AttributeImpl}s using the
-     * class name of the supplied {@link Attribute} interface class by appending <code>Impl</code> to it.
-     */
-    public static final AttributeFactory DEFAULT_ATTRIBUTE_FACTORY = new DefaultAttributeFactory();
-    
-    private static final class DefaultAttributeFactory extends AttributeFactory {
-      private static final WeakIdentityMap<Class<? extends Attribute>, WeakReference<Class<? extends AttributeImpl>>> attClassImplMap =
-        WeakIdentityMap.newConcurrentHashMap(false);
-      
-      DefaultAttributeFactory() {}
-    
-      @Override
-      public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
-        try {
-          return getClassForInterface(attClass).newInstance();
-        } catch (InstantiationException e) {
-          throw new IllegalArgumentException("Could not instantiate implementing class for " + attClass.getName());
-        } catch (IllegalAccessException e) {
-          throw new IllegalArgumentException("Could not instantiate implementing class for " + attClass.getName());
-        }
-      }
-      
-      private static Class<? extends AttributeImpl> getClassForInterface(Class<? extends Attribute> attClass) {
-        final WeakReference<Class<? extends AttributeImpl>> ref = attClassImplMap.get(attClass);
-        Class<? extends AttributeImpl> clazz = (ref == null) ? null : ref.get();
-        if (clazz == null) {
-          // we have the slight chance that another thread may do the same, but who cares?
-          try {
-            attClassImplMap.put(attClass,
-              new WeakReference<Class<? extends AttributeImpl>>(
-                clazz = Class.forName(attClass.getName() + "Impl", true, attClass.getClassLoader())
-                .asSubclass(AttributeImpl.class)
-              )
-            );
-          } catch (ClassNotFoundException e) {
-            throw new IllegalArgumentException("Could not find implementing class for " + attClass.getName());
-          }
-        }
-        return clazz;
-      }
-    }
-  }
-      
+  @Deprecated
+  public static final AttributeFactory DEFAULT_ATTRIBUTE_FACTORY = AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
+  
   /**
    * This class holds the state of an AttributeSource.
    * @see #captureState
@@ -122,7 +80,7 @@ public class AttributeSource {
   private final AttributeFactory factory;
   
   /**
-   * An AttributeSource using the default attribute factory {@link AttributeSource.AttributeFactory#DEFAULT_ATTRIBUTE_FACTORY}.
+   * An AttributeSource using the default attribute factory {@link AttributeFactory#DEFAULT_ATTRIBUTE_FACTORY}.
    */
   public AttributeSource() {
     this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
@@ -200,26 +158,28 @@ public class AttributeSource {
   }
   
   /** a cache that stores all interfaces for known implementation classes for performance (slow reflection) */
-  private static final WeakIdentityMap<Class<? extends AttributeImpl>,LinkedList<WeakReference<Class<? extends Attribute>>>> knownImplClasses =
+  private static final WeakIdentityMap<Class<? extends AttributeImpl>,Reference<Class<? extends Attribute>>[]> knownImplClasses =
     WeakIdentityMap.newConcurrentHashMap(false);
   
-  static LinkedList<WeakReference<Class<? extends Attribute>>> getAttributeInterfaces(final Class<? extends AttributeImpl> clazz) {
-    LinkedList<WeakReference<Class<? extends Attribute>>> foundInterfaces = knownImplClasses.get(clazz);
+  static Reference<Class<? extends Attribute>>[] getAttributeInterfaces(final Class<? extends AttributeImpl> clazz) {
+    Reference<Class<? extends Attribute>>[] foundInterfaces = knownImplClasses.get(clazz);
     if (foundInterfaces == null) {
       // we have the slight chance that another thread may do the same, but who cares?
-      foundInterfaces = new LinkedList<>();
+      final List<Reference<Class<? extends Attribute>>> intfList = new ArrayList<>();
       // find all interfaces that this attribute instance implements
       // and that extend the Attribute interface
       Class<?> actClazz = clazz;
       do {
         for (Class<?> curInterface : actClazz.getInterfaces()) {
           if (curInterface != Attribute.class && Attribute.class.isAssignableFrom(curInterface)) {
-            foundInterfaces.add(new WeakReference<Class<? extends Attribute>>(curInterface.asSubclass(Attribute.class)));
+            intfList.add(new WeakReference<Class<? extends Attribute>>(curInterface.asSubclass(Attribute.class)));
           }
         }
         actClazz = actClazz.getSuperclass();
       } while (actClazz != null);
-      knownImplClasses.put(clazz, foundInterfaces);
+      @SuppressWarnings({"unchecked", "rawtypes"}) final Reference<Class<? extends Attribute>>[] a =
+          intfList.toArray(new Reference[intfList.size()]);
+      knownImplClasses.put(clazz, foundInterfaces = a);
     }
     return foundInterfaces;
   }
@@ -235,11 +195,9 @@ public class AttributeSource {
   public final void addAttributeImpl(final AttributeImpl att) {
     final Class<? extends AttributeImpl> clazz = att.getClass();
     if (attributeImpls.containsKey(clazz)) return;
-    final LinkedList<WeakReference<Class<? extends Attribute>>> foundInterfaces =
-      getAttributeInterfaces(clazz);
     
     // add all interfaces of this AttributeImpl to the maps
-    for (WeakReference<Class<? extends Attribute>> curInterfaceRef : foundInterfaces) {
+    for (Reference<Class<? extends Attribute>> curInterfaceRef : getAttributeInterfaces(clazz)) {
       final Class<? extends Attribute> curInterface = curInterfaceRef.get();
       assert (curInterface != null) :
         "We have a strong reference on the class holding the interfaces, so they should never get evicted";

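For existing branch_4x users, the nested AttributeSource.AttributeFactory is replaced by the top-level class, and the deprecated AttributeSource.DEFAULT_ATTRIBUTE_FACTORY constant added above keeps old call sites compiling. A migration sketch based only on the hunks in this diff (FactoryMigrationSketch is an illustrative name):

import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.AttributeSource;

public class FactoryMigrationSketch {
  public static void main(String[] args) {
    // Bridge constant (deprecated): reachable through AttributeSource.
    @SuppressWarnings("deprecation")
    AttributeFactory viaSource = AttributeSource.DEFAULT_ATTRIBUTE_FACTORY;

    // Preferred: reference the top-level AttributeFactory directly.
    AttributeFactory preferred = AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
    System.out.println(viaSource == preferred); // same instance, per the diff

    // Either constant can be passed to the AttributeSource constructor:
    AttributeSource source = new AttributeSource(preferred);
    System.out.println(source.hasAttributes()); // false, nothing added yet
  }
}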
Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/TestToken.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/TestToken.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/TestToken.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/TestToken.java Wed May  7 01:02:35 2014
@@ -27,167 +27,43 @@ import org.apache.lucene.util.TestUtil;
 import java.io.StringReader;
 import java.util.HashMap;
 
+@Deprecated
 public class TestToken extends LuceneTestCase {
 
   public void testCtor() throws Exception {
-    Token t = new Token();
-    char[] content = "hello".toCharArray();
-    t.copyBuffer(content, 0, content.length);
-    assertNotSame(t.buffer(), content);
+    Token t = new Token("hello", 0, 0);
     assertEquals(0, t.startOffset());
     assertEquals(0, t.endOffset());
+    assertEquals(1, t.getPositionIncrement());
+    assertEquals(1, t.getPositionLength());
     assertEquals("hello", t.toString());
     assertEquals("word", t.type());
     assertEquals(0, t.getFlags());
-
-    t = new Token();
-    t.setOffset(6, 22);
-    t.setFlags(7);
-    t.copyBuffer(content, 0, content.length);
-    assertEquals("hello", t.toString());
-    assertEquals("hello", t.toString());
-    assertEquals(6, t.startOffset());
-    assertEquals(22, t.endOffset());
-    assertEquals("word", t.type());
-    assertEquals(7, t.getFlags());
-
-    t = new Token();
-    t.setOffset(6, 22);
-    t.setType("junk");
-    t.copyBuffer(content, 0, content.length);
-    assertEquals("hello", t.toString());
-    assertEquals("hello", t.toString());
-    assertEquals(6, t.startOffset());
-    assertEquals(22, t.endOffset());
-    assertEquals("junk", t.type());
-    assertEquals(0, t.getFlags());
-  }
-
-  public void testResize() {
-    Token t = new Token();
-    char[] content = "hello".toCharArray();
-    t.copyBuffer(content, 0, content.length);
-    for (int i = 0; i < 2000; i++)
-    {
-      t.resizeBuffer(i);
-      assertTrue(i <= t.buffer().length);
-      assertEquals("hello", t.toString());
-    }
-  }
-
-  public void testGrow() {
-    Token t = new Token();
-    StringBuilder buf = new StringBuilder("ab");
-    for (int i = 0; i < 20; i++)
-    {
-      char[] content = buf.toString().toCharArray();
-      t.copyBuffer(content, 0, content.length);
-      assertEquals(buf.length(), t.length());
-      assertEquals(buf.toString(), t.toString());
-      buf.append(buf.toString());
-    }
-    assertEquals(1048576, t.length());
-
-    // now as a string, second variant
-    t = new Token();
-    buf = new StringBuilder("ab");
-    for (int i = 0; i < 20; i++)
-    {
-      t.setEmpty().append(buf);
-      String content = buf.toString();
-      assertEquals(content.length(), t.length());
-      assertEquals(content, t.toString());
-      buf.append(content);
-    }
-    assertEquals(1048576, t.length());
-
-    // Test for slow growth to a long term
-    t = new Token();
-    buf = new StringBuilder("a");
-    for (int i = 0; i < 20000; i++)
-    {
-      t.setEmpty().append(buf);
-      String content = buf.toString();
-      assertEquals(content.length(), t.length());
-      assertEquals(content, t.toString());
-      buf.append("a");
-    }
-    assertEquals(20000, t.length());
-
-    // Test for slow growth to a long term
-    t = new Token();
-    buf = new StringBuilder("a");
-    for (int i = 0; i < 20000; i++)
-    {
-      t.setEmpty().append(buf);
-      String content = buf.toString();
-      assertEquals(content.length(), t.length());
-      assertEquals(content, t.toString());
-      buf.append("a");
-    }
-    assertEquals(20000, t.length());
-  }
-
-  public void testToString() throws Exception {
-    char[] b = {'a', 'l', 'o', 'h', 'a'};
-    Token t = new Token("", 0, 5);
-    t.copyBuffer(b, 0, 5);
-    assertEquals("aloha", t.toString());
-
-    t.setEmpty().append("hi there");
-    assertEquals("hi there", t.toString());
-  }
-
-  public void testTermBufferEquals() throws Exception {
-    Token t1a = new Token();
-    char[] content1a = "hello".toCharArray();
-    t1a.copyBuffer(content1a, 0, 5);
-    Token t1b = new Token();
-    char[] content1b = "hello".toCharArray();
-    t1b.copyBuffer(content1b, 0, 5);
-    Token t2 = new Token();
-    char[] content2 = "hello2".toCharArray();
-    t2.copyBuffer(content2, 0, 6);
-    assertTrue(t1a.equals(t1b));
-    assertFalse(t1a.equals(t2));
-    assertFalse(t2.equals(t1b));
-  }
-  
-  public void testMixedStringArray() throws Exception {
-    Token t = new Token("hello", 0, 5);
-    assertEquals(t.length(), 5);
-    assertEquals(t.toString(), "hello");
-    t.setEmpty().append("hello2");
-    assertEquals(t.length(), 6);
-    assertEquals(t.toString(), "hello2");
-    t.copyBuffer("hello3".toCharArray(), 0, 6);
-    assertEquals(t.toString(), "hello3");
-
-    char[] buffer = t.buffer();
-    buffer[1] = 'o';
-    assertEquals(t.toString(), "hollo3");
+    assertNull(t.getPayload());
   }
   
+  /* the CharTermAttribute stuff is tested by TestCharTermAttributeImpl */
+
   public void testClone() throws Exception {
     Token t = new Token();
     t.setOffset(0, 5);
     char[] content = "hello".toCharArray();
     t.copyBuffer(content, 0, 5);
     char[] buf = t.buffer();
-    Token copy = assertCloneIsEqual(t);
+    Token copy = TestCharTermAttributeImpl.assertCloneIsEqual(t);
     assertEquals(t.toString(), copy.toString());
     assertNotSame(buf, copy.buffer());
 
     BytesRef pl = new BytesRef(new byte[]{1,2,3,4});
     t.setPayload(pl);
-    copy = assertCloneIsEqual(t);
+    copy = TestCharTermAttributeImpl.assertCloneIsEqual(t);
     assertEquals(pl, copy.getPayload());
     assertNotSame(pl, copy.getPayload());
   }
   
   public void testCopyTo() throws Exception {
     Token t = new Token();
-    Token copy = assertCopyIsEqual(t);
+    Token copy = TestCharTermAttributeImpl.assertCopyIsEqual(t);
     assertEquals("", t.toString());
     assertEquals("", copy.toString());
 
@@ -196,13 +72,13 @@ public class TestToken extends LuceneTes
     char[] content = "hello".toCharArray();
     t.copyBuffer(content, 0, 5);
     char[] buf = t.buffer();
-    copy = assertCopyIsEqual(t);
+    copy = TestCharTermAttributeImpl.assertCopyIsEqual(t);
     assertEquals(t.toString(), copy.toString());
     assertNotSame(buf, copy.buffer());
 
     BytesRef pl = new BytesRef(new byte[]{1,2,3,4});
     t.setPayload(pl);
-    copy = assertCopyIsEqual(t);
+    copy = TestCharTermAttributeImpl.assertCopyIsEqual(t);
     assertEquals(pl, copy.getPayload());
     assertNotSame(pl, copy.getPayload());
   }
@@ -243,35 +119,19 @@ public class TestToken extends LuceneTes
   public void testAttributeReflection() throws Exception {
     Token t = new Token("foobar", 6, 22);
     t.setFlags(8);
+    t.setPositionIncrement(3);
+    t.setPositionLength(11);
     TestUtil.assertAttributeReflection(t,
         new HashMap<String, Object>() {{
           put(CharTermAttribute.class.getName() + "#term", "foobar");
           put(TermToBytesRefAttribute.class.getName() + "#bytes", new BytesRef("foobar"));
           put(OffsetAttribute.class.getName() + "#startOffset", 6);
           put(OffsetAttribute.class.getName() + "#endOffset", 22);
-          put(PositionIncrementAttribute.class.getName() + "#positionIncrement", 1);
-          put(PositionLengthAttribute.class.getName() + "#positionLength", 1);
+          put(PositionIncrementAttribute.class.getName() + "#positionIncrement", 3);
+          put(PositionLengthAttribute.class.getName() + "#positionLength", 11);
           put(PayloadAttribute.class.getName() + "#payload", null);
           put(TypeAttribute.class.getName() + "#type", TypeAttribute.DEFAULT_TYPE);
           put(FlagsAttribute.class.getName() + "#flags", 8);
         }});
   }
-
-
-  public static <T extends AttributeImpl> T assertCloneIsEqual(T att) {
-    @SuppressWarnings("unchecked")
-    T clone = (T) att.clone();
-    assertEquals("Clone must be equal", att, clone);
-    assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
-    return clone;
-  }
-
-  public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Exception {
-    @SuppressWarnings("unchecked")
-    T copy = (T) att.getClass().newInstance();
-    att.copyTo(copy);
-    assertEquals("Copied instance must be equal", att, copy);
-    assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
-    return copy;
-  }
 }

Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java Wed May  7 01:02:35 2014
@@ -17,7 +17,7 @@ package org.apache.lucene.analysis.token
  * limitations under the License.
  */
 
-import org.apache.lucene.analysis.TestToken;
+import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
@@ -95,7 +95,7 @@ public class TestCharTermAttributeImpl e
     char[] content = "hello".toCharArray();
     t.copyBuffer(content, 0, 5);
     char[] buf = t.buffer();
-    CharTermAttributeImpl copy = TestToken.assertCloneIsEqual(t);
+    CharTermAttributeImpl copy = assertCloneIsEqual(t);
     assertEquals(t.toString(), copy.toString());
     assertNotSame(buf, copy.buffer());
   }
@@ -117,7 +117,7 @@ public class TestCharTermAttributeImpl e
   
   public void testCopyTo() throws Exception {
     CharTermAttributeImpl t = new CharTermAttributeImpl();
-    CharTermAttributeImpl copy = TestToken.assertCopyIsEqual(t);
+    CharTermAttributeImpl copy = assertCopyIsEqual(t);
     assertEquals("", t.toString());
     assertEquals("", copy.toString());
 
@@ -125,7 +125,7 @@ public class TestCharTermAttributeImpl e
     char[] content = "hello".toCharArray();
     t.copyBuffer(content, 0, 5);
     char[] buf = t.buffer();
-    copy = TestToken.assertCopyIsEqual(t);
+    copy = assertCopyIsEqual(t);
     assertEquals(t.toString(), copy.toString());
     assertNotSame(buf, copy.buffer());
   }
@@ -284,6 +284,23 @@ public class TestCharTermAttributeImpl e
     }
   }
 
+  public static <T extends AttributeImpl> T assertCloneIsEqual(T att) {
+    @SuppressWarnings("unchecked")
+    T clone = (T) att.clone();
+    assertEquals("Clone must be equal", att, clone);
+    assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
+    return clone;
+  }
+
+  public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Exception {
+    @SuppressWarnings("unchecked")
+    T copy = (T) att.getClass().newInstance();
+    att.copyTo(copy);
+    assertEquals("Copied instance must be equal", att, copy);
+    assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
+    return copy;
+  }
+  
   /*
   
   // test speed of the dynamic instanceof checks in append(CharSequence),

Copied: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java (from r1592914, lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java?p2=lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java&p1=lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java&r1=1592914&r2=1592929&rev=1592929&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/analysis/tokenattributes/TestPackedTokenAttributeImpl.java Wed May  7 01:02:35 2014
@@ -59,8 +59,7 @@ public class TestPackedTokenAttributeImp
   }
   
   public void testPackedTokenAttributeFactory() throws Exception {
-    TokenStream ts = new MockTokenizer(TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
-    ((Tokenizer)ts).setReader(new StringReader("foo bar"));
+    TokenStream ts = new MockTokenizer(TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY, new StringReader("foo bar"), MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
     
     assertTrue("CharTermAttribute is not implemented by Token",
       ts.addAttribute(CharTermAttribute.class) instanceof PackedTokenAttributeImpl);

Modified: lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java (original)
+++ lucene/dev/branches/branch_4x/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java Wed May  7 01:02:35 2014
@@ -38,15 +38,15 @@ import org.apache.lucene.search.TermQuer
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeImpl;
-import org.apache.lucene.util.AttributeSource.AttributeFactory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase.Monster;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.TimeUnits;
-import org.junit.Ignore;
+
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 // NOTE: this test will fail w/ PreFlexRW codec!  (Because

Modified: lucene/dev/branches/branch_4x/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java (original)
+++ lucene/dev/branches/branch_4x/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java Wed May  7 01:02:35 2014
@@ -39,9 +39,9 @@ import org.apache.lucene.analysis.Canned
 import org.apache.lucene.analysis.CannedBinaryTokenStream;
 import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockBytesAttributeFactory;
 import org.apache.lucene.analysis.MockTokenFilter;
 import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.MockUTF16TermAttributeImpl;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
@@ -52,6 +52,7 @@ import org.apache.lucene.document.Docume
 import org.apache.lucene.search.suggest.Lookup.LookupResult;
 import org.apache.lucene.search.suggest.Input;
 import org.apache.lucene.search.suggest.InputArrayIterator;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
@@ -621,8 +622,6 @@ public class AnalyzingSuggesterTest exte
     private int numStopChars;
     private boolean preserveHoles;
 
-    private final MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
-
     public MockTokenEatingAnalyzer(int numStopChars, boolean preserveHoles) {
       this.preserveHoles = preserveHoles;
       this.numStopChars = numStopChars;
@@ -630,7 +629,7 @@ public class AnalyzingSuggesterTest exte
 
     @Override
     public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      MockTokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+      MockTokenizer tokenizer = new MockTokenizer(MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
       tokenizer.setEnableChecks(true);
       TokenStream next;
       if (numStopChars != 0) {

Modified: lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java (original)
+++ lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java Wed May  7 01:02:35 2014
@@ -25,6 +25,7 @@ import java.io.Reader;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.io.Writer;
+import java.lang.reflect.Constructor;
 import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.util.concurrent.CountDownLatch;
@@ -38,8 +39,8 @@ import org.apache.lucene.index.FieldInfo
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeImpl;
-import org.apache.lucene.util.AttributeSource.AttributeFactory;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
@@ -926,16 +927,41 @@ public abstract class BaseTokenStreamTes
     return keywordMockTokenizer(new StringReader(input));
   }
   
-  /** Returns a new AttributeFactory impl */
+  /**
+   * This provides the default AttributeFactory in reflective-only mode (package private)
+   * so we can test it.
+   */
+  private final static AttributeFactory REFLECTIVE_ATTRIBUTE_FACTORY;
+  static {
+    try {
+      final Constructor<? extends AttributeFactory> constr = Class
+          .forName(AttributeFactory.class.getName() + "$DefaultAttributeFactory")
+          .asSubclass(AttributeFactory.class)
+          .getDeclaredConstructor(boolean.class);
+      constr.setAccessible(true);
+      REFLECTIVE_ATTRIBUTE_FACTORY = constr.newInstance(false);
+    } catch (ReflectiveOperationException e) {
+      throw new Error("Cannot initantiate a reflective-only DefaultAttributeFactory", e);
+    }
+  }
+  
+  /** Returns a random AttributeFactory impl */
   public static AttributeFactory newAttributeFactory(Random random) {
-    if (random.nextBoolean()) {
-      return Token.TOKEN_ATTRIBUTE_FACTORY;
-    } else {
-      return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
+    switch (random.nextInt(4)) {
+      case 0:
+        return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
+      case 1:
+        return Token.TOKEN_ATTRIBUTE_FACTORY;
+      case 2:
+        return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
+      case 3:
+        return REFLECTIVE_ATTRIBUTE_FACTORY;
+      default:
+        throw new AssertionError("Please fix the Random.nextInt() call above");
     }
   }
   
-  /** Returns a new AttributeFactory impl */
+  /** Returns a random AttributeFactory impl */
   public static AttributeFactory newAttributeFactory() {
     return newAttributeFactory(random());
   }
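
A brief usage sketch of the new helper, assuming a test class that extends BaseTokenStreamTestCase (so random() and newAttributeFactory() are in scope); the tokenizer arguments mirror the branch_4x MockTokenizer constructor used elsewhere in this commit:

    import java.io.StringReader;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.util.AttributeFactory;

    // Draw one of the four factories at random (including the reflective-only
    // DefaultAttributeFactory set up in the static block above).
    AttributeFactory factory = newAttributeFactory(random());
    Tokenizer tok = new MockTokenizer(factory, new StringReader("some test text"),
        MockTokenizer.WHITESPACE, true, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);

Randomizing over all four factories, rather than the previous two, is what gives the reflective-only DefaultAttributeFactory test coverage.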

Modified: lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java (original)
+++ lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java Wed May  7 01:02:35 2014
@@ -22,12 +22,11 @@ import java.io.Reader;
 /**
  * Analyzer for testing that encodes terms as UTF-16 bytes.
  */
-public class MockBytesAnalyzer extends Analyzer {
-  private final MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
-  
+public final class MockBytesAnalyzer extends Analyzer {  
   @Override
   protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-    Tokenizer t = new MockTokenizer(factory, reader, MockTokenizer.KEYWORD, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+    Tokenizer t = new MockTokenizer(MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY,
+        reader, MockTokenizer.KEYWORD, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
     return new TokenStreamComponents(t);
   }
 }

Modified: lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java (original)
+++ lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java Wed May  7 01:02:35 2014
@@ -24,6 +24,7 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.apache.lucene.util.automaton.RegExp;
 
@@ -114,7 +115,7 @@ public class MockTokenizer extends Token
     this(factory, input, runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH);
   }
 
-  /** Calls {@link #MockTokenizer(org.apache.lucene.util.AttributeSource.AttributeFactory,Reader,CharacterRunAutomaton,boolean)
+  /** Calls {@link #MockTokenizer(AttributeFactory,Reader,CharacterRunAutomaton,boolean)
    *                MockTokenizer(AttributeFactory, Reader, WHITESPACE, true)} */
   public MockTokenizer(AttributeFactory factory, Reader input) {
     this(input, WHITESPACE, true);

Modified: lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockUTF16TermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockUTF16TermAttributeImpl.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockUTF16TermAttributeImpl.java (original)
+++ lucene/dev/branches/branch_4x/lucene/test-framework/src/java/org/apache/lucene/analysis/MockUTF16TermAttributeImpl.java Wed May  7 01:02:35 2014
@@ -17,9 +17,10 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -27,12 +28,15 @@ import org.apache.lucene.util.BytesRef;
  * text as UTF-16 bytes instead of as UTF-8 bytes.
  */
 public class MockUTF16TermAttributeImpl extends CharTermAttributeImpl {
-  static final Charset charset = Charset.forName("UTF-16LE");
+  
+  /** Factory that returns an instance of this class for CharTermAttribute */
+  public static final AttributeFactory UTF16_TERM_ATTRIBUTE_FACTORY =
+      AttributeFactory.getStaticImplementation(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, MockUTF16TermAttributeImpl.class);
   
   @Override
   public void fillBytesRef() {
     BytesRef bytes = getBytesRef();
-    byte[] utf16 = toString().getBytes(charset);
+    byte[] utf16 = toString().getBytes(StandardCharsets.UTF_16LE);
     bytes.bytes = utf16;
     bytes.offset = 0;
     bytes.length = utf16.length;
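
To make the factory's intent concrete, an illustrative sketch (not part of the patch) of the bytes the attribute produces; the term text "für" is only an example value:

    import org.apache.lucene.analysis.MockUTF16TermAttributeImpl;
    import org.apache.lucene.util.BytesRef;

    // Fill the BytesRef straight from the attribute implementation.
    MockUTF16TermAttributeImpl term = new MockUTF16TermAttributeImpl();
    term.append("für");
    term.fillBytesRef();
    BytesRef ref = term.getBytesRef();
    // ref now holds the UTF-16LE encoding of "für" (3 chars -> 6 bytes) instead of
    // the UTF-8 bytes a plain CharTermAttributeImpl would produce.

Tests that need UTF-16 term bytes now share the single UTF16_TERM_ATTRIBUTE_FACTORY constant instead of allocating a MockBytesAttributeFactory per analyzer, as seen in the MockBytesAnalyzer and AnalyzingSuggesterTest hunks above.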

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java Wed May  7 01:02:35 2014
@@ -31,6 +31,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.AttributeSource.State;
 import org.apache.solr.analysis.SolrAnalyzer;

Modified: lucene/dev/branches/branch_4x/solr/test-framework/src/java/org/apache/solr/analysis/MockTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/test-framework/src/java/org/apache/solr/analysis/MockTokenizerFactory.java?rev=1592929&r1=1592928&r2=1592929&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/test-framework/src/java/org/apache/solr/analysis/MockTokenizerFactory.java (original)
+++ lucene/dev/branches/branch_4x/solr/test-framework/src/java/org/apache/solr/analysis/MockTokenizerFactory.java Wed May  7 01:02:35 2014
@@ -23,7 +23,7 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.util.TokenizerFactory;
-import org.apache.lucene.util.AttributeSource.AttributeFactory;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 
 /**


