lucene-commits mailing list archives

From sar...@apache.org
Subject [06/48] lucene-solr:jira/lucene-2562: Add luke as a sub-module.
Date Tue, 17 Jul 2018 21:12:41 GMT
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/app/util/MessageUtils.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/util/MessageUtils.java b/lucene/luke/src/java/org/apache/lucene/luke/app/util/MessageUtils.java
new file mode 100644
index 0000000..d9e9bfc
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/util/MessageUtils.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.app.util;
+
+import java.text.MessageFormat;
+import java.util.Locale;
+import java.util.ResourceBundle;
+
+/**
+ * Utilities for accessing message resources.
+ */
+public class MessageUtils {
+
+  private static ResourceBundle bundle = ResourceBundle.getBundle("fxml/messages", Locale.getDefault());
+
+  public static ResourceBundle getBundle() {
+    return bundle;
+  }
+
+  public static String getLocalizedMessage(String key) {
+    return bundle.getString(key);
+  }
+
+  public static String getLocalizedMessage(String key, Object... args) {
+    String pattern = bundle.getString(key);
+    return MessageFormat.format(pattern, args);
+  }
+
+  private MessageUtils() {
+  }
+}
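A minimal usage sketch for MessageUtils; the keys below are hypothetical and assume matching entries in the fxml/messages resource bundle:

    // hypothetical keys - any keys defined in the fxml/messages bundle would work
    String label = MessageUtils.getLocalizedMessage("label.status");
    String hits = MessageUtils.getLocalizedMessage("label.hit_count", 42);  // formatted via MessageFormat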

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/app/util/NumericUtils.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/util/NumericUtils.java b/lucene/luke/src/java/org/apache/lucene/luke/app/util/NumericUtils.java
new file mode 100644
index 0000000..c5885bf
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/util/NumericUtils.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.app.util;
+
+import com.google.common.base.Strings;
+
+import java.util.Arrays;
+
+/**
+ * Utilities for handling numeric values.
+ */
+public class NumericUtils {
+
+  public static int[] convertToIntArray(String value, boolean ignoreException) throws NumberFormatException {
+    if (Strings.isNullOrEmpty(value)) {
+      return new int[]{0};
+    }
+    try {
+      return Arrays.stream(value.trim().split(",")).mapToInt(Integer::parseInt).toArray();
+    } catch (NumberFormatException e) {
+      if (ignoreException) {
+        return new int[]{0};
+      } else {
+        throw e;
+      }
+    }
+  }
+
+  public static long[] convertToLongArray(String value, boolean ignoreException) throws NumberFormatException {
+    if (Strings.isNullOrEmpty(value)) {
+      return new long[]{0};
+    }
+    try {
+      return Arrays.stream(value.trim().split(",")).mapToLong(Long::parseLong).toArray();
+    } catch (NumberFormatException e) {
+      if (ignoreException) {
+        return new long[]{0};
+      } else {
+        throw e;
+      }
+    }
+  }
+
+  public static float[] convertToFloatArray(String value, boolean ignoreException) throws NumberFormatException {
+    if (Strings.isNullOrEmpty(value)) {
+      return new float[]{0};
+    }
+    try {
+      String[] strVals = value.trim().split(",");
+      float[] values = new float[strVals.length];
+      for (int i = 0; i < strVals.length; i++) {
+        values[i] = Float.parseFloat(strVals[i]);
+      }
+      return values;
+    } catch (NumberFormatException e) {
+      if (ignoreException) {
+        return new float[]{0};
+      } else {
+        throw e;
+      }
+    }
+  }
+
+  public static double[] convertToDoubleArray(String value, boolean ignoreException) throws NumberFormatException {
+    if (Strings.isNullOrEmpty(value)) {
+      return new double[]{0};
+    }
+    try {
+      return Arrays.stream(value.trim().split(",")).mapToDouble(Double::parseDouble).toArray();
+    } catch (NumberFormatException e) {
+      if (ignoreException) {
+        return new double[]{0};
+      } else {
+        throw e;
+      }
+    }
+  }
+
+  public static long tryConvertToLongValue(String value) throws NumberFormatException {
+    try {
+      // try parse to long
+      return Long.parseLong(value.trim());
+    } catch (NumberFormatException e) {
+      // try parse to double
+      double dvalue = Double.parseDouble(value.trim());
+      return org.apache.lucene.util.NumericUtils.doubleToSortableLong(dvalue);
+    }
+  }
+
+  private NumericUtils() {
+  }
+}
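A short sketch of how these conversion helpers behave on typical input (the values are illustrative only):

    int[] ints = NumericUtils.convertToIntArray("1,2,3", false);   // {1, 2, 3}
    long[] safe = NumericUtils.convertToLongArray("abc", true);    // {0} - parsing fails but ignoreException is true
    long sortable = NumericUtils.tryConvertToLongValue("3.14");    // falls back to Double.parseDouble + doubleToSortableLong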

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaAppender.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaAppender.java b/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaAppender.java
new file mode 100644
index 0000000..3f2e3ee
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaAppender.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.app.util;
+
+import java.io.Serializable;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import javafx.scene.control.TextArea;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+/**
+ * Log4j2 appender that writes log events to a JavaFX {@link TextArea}.
+ */
+@Plugin(name="TextAreaAppender", category="Core", elementType="appender", printObject=true)
+public class TextAreaAppender extends AbstractAppender {
+
+  public static TextArea textArea;
+
+  private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
+  private final Lock readLock = rwLock.readLock();
+
+  protected TextAreaAppender(String name, Filter filter,
+                                 Layout<? extends Serializable> layout, final boolean ignoreExceptions) {
+    super(name, filter, layout, ignoreExceptions);
+  }
+
+  @Override
+  public void append(LogEvent event) {
+    if (textArea == null) {
+      throw new IllegalStateException();
+    }
+
+    readLock.lock();
+    try {
+      String message = new String(getLayout().toByteArray(event));
+      textArea.selectEnd();
+      textArea.appendText(message);
+    } finally {
+      readLock.unlock();
+    }
+  }
+
+  @PluginFactory
+  public static TextAreaAppender createAppender(
+      @PluginAttribute("name") String name,
+      @PluginElement("Layout") Layout<? extends Serializable> layout,
+      @PluginElement("Filter") final Filter filter,
+      @PluginAttribute("otherAttribute") String otherAttribute
+  ) {
+    if (name == null) {
+      LOGGER.error("No name provided for TextAreaAppender");
+      return null;
+    }
+    if (layout == null) {
+      layout = PatternLayout.createDefaultLayout();
+    }
+
+    return new TextAreaAppender(name, filter, layout, true);
+  }
+
+}
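Since append() writes to the static textArea field, the UI layer has to assign that field before log events arrive; otherwise append() throws IllegalStateException. A minimal wiring sketch, assuming a JavaFX TextArea named logArea exists in the controller:

    // assign the JavaFX control before logging starts
    TextAreaAppender.textArea = logArea;

The appender itself would typically be referenced from the log4j2 configuration by its plugin name, TextAreaAppender.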

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaPrintStream.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaPrintStream.java b/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaPrintStream.java
new file mode 100644
index 0000000..402f6be
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/app/util/TextAreaPrintStream.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.app.util;
+
+import javafx.scene.control.TextArea;
+import org.slf4j.Logger;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * A {@link PrintStream} that buffers printed output and flushes it into a JavaFX {@link TextArea}.
+ */
+public class TextAreaPrintStream extends PrintStream {
+
+  private Logger logger;
+
+  private ByteArrayOutputStream baos;
+
+  private TextArea textArea;
+
+  public TextAreaPrintStream(TextArea textArea, ByteArrayOutputStream baos, Logger logger) {
+    super(baos, false);
+    this.baos = baos;
+    this.textArea = textArea;
+    this.logger = logger;
+    baos.reset();
+    textArea.selectEnd();
+  }
+
+  @Override
+  public void println(String s) {
+    try {
+      baos.write(s.getBytes(StandardCharsets.UTF_8));
+      baos.write('\n');
+    } catch (IOException e) {
+      logger.error(e.getMessage(), e);
+    }
+  }
+
+  @Override
+  public void flush() {
+    try {
+      textArea.selectEnd();
+      textArea.appendText(baos.toString(StandardCharsets.UTF_8.name()));
+    } catch (IOException e) {
+      logger.error(e.getMessage(), e);
+    } finally {
+      baos.reset();
+    }
+  }
+
+}
+
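A sketch of capturing printed output into the TextArea through this stream; outputArea and log are assumed to be an existing TextArea and Logger in the caller:

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TextAreaPrintStream ps = new TextAreaPrintStream(outputArea, baos, log);
    ps.println("done");   // buffered in baos
    ps.flush();           // appended to the TextArea, then the buffer is reset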

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/LukeException.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/LukeException.java b/lucene/luke/src/java/org/apache/lucene/luke/models/LukeException.java
new file mode 100644
index 0000000..727c373
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/LukeException.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models;
+
+public class LukeException extends RuntimeException {
+
+  public LukeException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public LukeException(Throwable cause) {
+    super(cause);
+  }
+
+  public LukeException(String message) {
+    super(message);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/LukeModel.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/LukeModel.java b/lucene/luke/src/java/org/apache/lucene/luke/models/LukeModel.java
new file mode 100644
index 0000000..dfe5297
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/LukeModel.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models;
+
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.luke.util.IndexUtils;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.slf4j.Logger;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.util.Collection;
+
+/**
+ * Abstract model class. It holds an index reader and provides basic features for all concrete subclasses.
+ */
+public abstract class LukeModel {
+
+  protected Directory dir;
+
+  protected IndexReader reader;
+
+  protected Bits liveDocs;
+
+  protected IndexCommit commit;
+
+  protected LukeModel(@Nonnull IndexReader reader) {
+    this.reader = reader;
+
+    if (reader instanceof DirectoryReader) {
+      DirectoryReader dr = (DirectoryReader) reader;
+      this.dir = dr.directory();
+      try {
+        this.commit = dr.getIndexCommit();
+      } catch (IOException e) {
+        throw new LukeException(e.getMessage(), e);
+      }
+    } else {
+      this.dir = null;
+      this.commit = null;
+    }
+
+    this.liveDocs = IndexUtils.getLiveDocs(reader);
+  }
+
+  protected LukeModel(@Nonnull Directory dir) {
+    this.dir = dir;
+  }
+
+  public Collection<String> getFieldNames() {
+    return IndexUtils.getFieldNames(reader);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/Analysis.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/Analysis.java b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/Analysis.java
new file mode 100644
index 0000000..f0f656e
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/Analysis.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.analysis;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.luke.models.LukeException;
+
+import javax.annotation.Nonnull;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A dedicated interface for Luke's Analysis tab.
+ */
+public interface Analysis {
+
+  /**
+   * Holder for a token.
+   */
+  class Token {
+    private final String term;
+    private final List<TokenAttribute> attributes;
+
+    Token(@Nonnull String term, @Nonnull List<TokenAttribute> attributes) {
+      this.term = term;
+      this.attributes = attributes;
+    }
+
+    /**
+     * Returns the string representation of this token.
+     */
+    public String getTerm() {
+      return term;
+    }
+
+    /**
+     * Returns attributes of this token.
+     */
+    public List<TokenAttribute> getAttributes() {
+      return ImmutableList.copyOf(attributes);
+    }
+  }
+
+  /**
+   * Holder for a token attribute.
+   */
+  class TokenAttribute {
+    private final String attClass;
+    private final Map<String, String> attValues;
+
+    TokenAttribute(@Nonnull String attClass, @Nonnull Map<String, String> attValues) {
+      this.attClass = attClass;
+      this.attValues = attValues;
+    }
+
+    /**
+     * Returns attribute class name.
+     */
+    public String getAttClass() {
+      return attClass;
+    }
+
+    /**
+     * Returns the values of this attribute.
+     */
+    public Map<String, String> getAttValues() {
+      return ImmutableMap.copyOf(attValues);
+    }
+  }
+
+  /**
+   * Returns built-in {@link Analyzer}s.
+   */
+  Collection<Class<? extends Analyzer>> getPresetAnalyzerTypes();
+
+  /**
+   * Returns available {@link CharFilterFactory}s.
+   */
+  Collection<Class<? extends CharFilterFactory>> getAvailableCharFilterFactories();
+
+  /**
+   * Returns available {@link TokenizerFactory}s.
+   */
+  Collection<Class<? extends TokenizerFactory>> getAvailableTokenizerFactories();
+
+  /**
+   * Returns available {@link TokenFilterFactory}s.
+   */
+  Collection<Class<? extends TokenFilterFactory>> getAvailableTokenFilterFactories();
+
+  /**
+   * Creates a new Analyzer instance for the specified class name.
+   *
+   * @param analyzerType - instantiable class name of an Analyzer
+   * @return new Analyzer instance
+   * @throws LukeException - if a new Analyzer instance cannot be created
+   */
+  Analyzer createAnalyzerFromClassName(String analyzerType);
+
+  /**
+   * Creates a new custom Analyzer instance with the given configuration.
+   *
+   * @param config - custom analyzer configuration
+   * @return new Analyzer instance
+   * @throws LukeException - if a new Analyzer instance cannot be created
+   */
+  Analyzer buildCustomAnalyzer(CustomAnalyzerConfig config);
+
+  /**
+   * Analyzes the given text with the current Analyzer.
+   *
+   * @param text - text string to analyze
+   * @return the list of tokens
+   * @throws LukeException - if an internal error occurs when analyzing text
+   */
+  List<Token> analyze(String text);
+
+  /**
+   * Returns the current analyzer.
+   * @throws LukeException - if the current analyzer is not set
+   */
+  Analyzer currentAnalyzer();
+
+  /**
+   * Adds external jar files to the classpath and loads custom {@link CharFilterFactory}s, {@link TokenizerFactory}s, or {@link TokenFilterFactory}s.
+   *
+   * @param jarFiles - list of paths to jar files
+   * @throws LukeException - if an internal error occurs when loading jars
+   */
+  void addExternalJars(List<String> jarFiles);
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisFactory.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisFactory.java
new file mode 100644
index 0000000..c0cca4b
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisFactory.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.analysis;
+
+public class AnalysisFactory {
+
+  public Analysis newInstance() {
+    return new AnalysisImpl();
+  }
+
+}
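A usage sketch combining the factory with the Analysis interface; StandardAnalyzer is used only as an example of a preset analyzer with a no-arg constructor:

    Analysis analysis = new AnalysisFactory().newInstance();
    analysis.createAnalyzerFromClassName("org.apache.lucene.analysis.standard.StandardAnalyzer");
    for (Analysis.Token token : analysis.analyze("Lucene in Action")) {
      System.out.println(token.getTerm());
    }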

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisImpl.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisImpl.java
new file mode 100644
index 0000000..010d8ab
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/AnalysisImpl.java
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.analysis;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.custom.CustomAnalyzer;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.luke.models.LukeException;
+import org.apache.lucene.util.AttributeImpl;
+import org.reflections.Reflections;
+import org.reflections.scanners.SubTypesScanner;
+import org.reflections.util.ClasspathHelper;
+import org.reflections.util.ConfigurationBuilder;
+import org.reflections.util.FilterBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.lang.reflect.Modifier;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Default implementation of {@link Analysis}.
+ */
+public final class AnalysisImpl implements Analysis {
+
+  private static final Logger logger = LoggerFactory.getLogger(AnalysisImpl.class);
+
+  private final List<Class<? extends Analyzer>> presetAnalyzerTypes;
+
+  private Analyzer analyzer;
+
+  public AnalysisImpl() {
+    presetAnalyzerTypes = new ArrayList<>();
+    for (Class<? extends Analyzer> clazz : getInstantiableSubTypesBuiltIn(Analyzer.class)) {
+      try {
+        // add to presets if no args constructor is available
+        clazz.getConstructor();
+        presetAnalyzerTypes.add(clazz);
+      } catch (NoSuchMethodException e) {
+        // skip analyzers that do not have a no-arg constructor
+      }
+    }
+  }
+
+  @Override
+  public void addExternalJars(List<String> jarFiles) {
+    List<URL> urls = new ArrayList<>();
+
+    for (String jarFile : jarFiles) {
+      Path path = FileSystems.getDefault().getPath(jarFile);
+      if (!Files.exists(path) || !jarFile.endsWith(".jar")) {
+        throw new LukeException(String.format("Invalid jar file path: %s", jarFile));
+      }
+      try {
+        urls.add(path.toUri().toURL());
+      } catch (MalformedURLException e) {
+        throw new LukeException(e.getMessage(), e);
+      }
+    }
+
+    // reload available tokenizers, charfilters, and tokenfilters
+    URLClassLoader classLoader = new URLClassLoader(
+        urls.toArray(new URL[urls.size()]), ClassLoader.getSystemClassLoader());
+    CharFilterFactory.reloadCharFilters(classLoader);
+    TokenizerFactory.reloadTokenizers(classLoader);
+    TokenFilterFactory.reloadTokenFilters(classLoader);
+  }
+
+  @Override
+  public Collection<Class<? extends Analyzer>> getPresetAnalyzerTypes() {
+    return ImmutableList.copyOf(presetAnalyzerTypes);
+  }
+
+  @Override
+  public Collection<Class<? extends CharFilterFactory>> getAvailableCharFilterFactories() {
+    return CharFilterFactory.availableCharFilters().stream()
+        .map(CharFilterFactory::lookupClass)
+        .sorted(Comparator.comparing(Class::getName))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public Collection<Class<? extends TokenizerFactory>> getAvailableTokenizerFactories() {
+    return TokenizerFactory.availableTokenizers().stream()
+        .map(TokenizerFactory::lookupClass)
+        .sorted(Comparator.comparing(Class::getName))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public Collection<Class<? extends TokenFilterFactory>> getAvailableTokenFilterFactories() {
+    return TokenFilterFactory.availableTokenFilters().stream()
+        .map(TokenFilterFactory::lookupClass)
+        .sorted(Comparator.comparing(Class::getName))
+        .collect(Collectors.toList());
+  }
+
+  private <T> List<Class<? extends T>> getInstantiableSubTypesBuiltIn(Class<T> superType) {
+    Reflections reflections = new Reflections(new ConfigurationBuilder()
+        .setUrls(ClasspathHelper.forPackage("org.apache.lucene"))
+        .setScanners(new SubTypesScanner())
+        .filterInputsBy(new FilterBuilder().include("org\\.apache\\.lucene\\.analysis.*")));
+    return reflections.getSubTypesOf(superType).stream()
+        .filter(type -> !Modifier.isAbstract(type.getModifiers()))
+        .filter(type -> !type.getSimpleName().startsWith("Mock"))
+        .sorted(Comparator.comparing(Class::getName))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public List<Token> analyze(@Nonnull String text) {
+    if (analyzer == null) {
+      throw new LukeException("Analyzer is not set.");
+    }
+
+    try {
+      List<Token> result = new ArrayList<>();
+
+      TokenStream stream = analyzer.tokenStream("", text);
+      stream.reset();
+
+      CharTermAttribute charAtt = stream.getAttribute(CharTermAttribute.class);
+
+      // iterate tokens
+      while (stream.incrementToken()) {
+        List<TokenAttribute> attributes = new ArrayList<>();
+        Iterator<AttributeImpl> itr = stream.getAttributeImplsIterator();
+
+        while (itr.hasNext()) {
+          AttributeImpl att = itr.next();
+          Map<String, String> attValues = new LinkedHashMap<>();
+          att.reflectWith((attClass, key, value) -> {
+            if (value != null)
+              attValues.put(key, value.toString());
+          });
+          attributes.add(new TokenAttribute(att.getClass().getSimpleName(), attValues));
+        }
+
+        result.add(new Token(charAtt.toString(), attributes));
+      }
+      stream.close();
+
+      return result;
+    } catch (IOException e) {
+      throw new LukeException(e.getMessage(), e);
+    }
+  }
+
+  @Override
+  public Analyzer createAnalyzerFromClassName(@Nonnull String analyzerType) {
+    try {
+      Class<? extends Analyzer> clazz = Class.forName(analyzerType).asSubclass(Analyzer.class);
+      this.analyzer = clazz.newInstance();
+      return analyzer;
+    } catch (ReflectiveOperationException e) {
+      throw new LukeException(String.format("Failed to instantiate class: %s", analyzerType), e);
+    }
+  }
+
+  @Override
+  public Analyzer buildCustomAnalyzer(@Nonnull CustomAnalyzerConfig config) {
+    try {
+      // create builder
+      CustomAnalyzer.Builder builder = config.getConfigDir()
+          .map(path -> CustomAnalyzer.builder(FileSystems.getDefault().getPath(path)))
+          .orElse(CustomAnalyzer.builder());
+
+      // set tokenizer
+      builder.withTokenizer(Class.forName(config.getTokenizerConfig().getName()).asSubclass(TokenizerFactory.class),
+          config.getTokenizerConfig().getParams());
+
+      // add char filters
+      for (CustomAnalyzerConfig.ComponentConfig cfConf : config.getCharFilterConfigs()) {
+        builder.addCharFilter(Class.forName(cfConf.getName()).asSubclass(CharFilterFactory.class), cfConf.getParams());
+      }
+
+      // add token filters
+      for (CustomAnalyzerConfig.ComponentConfig tfConf : config.getTokenFilterConfigs()) {
+        builder.addTokenFilter(Class.forName(tfConf.getName()).asSubclass(TokenFilterFactory.class), tfConf.getParams());
+      }
+
+      // build analyzer
+      this.analyzer = builder.build();
+      return analyzer;
+    } catch (Exception e) {
+      throw new LukeException("Failed to build custom analyzer.", e);
+    }
+  }
+
+  @Override
+  public Analyzer currentAnalyzer() {
+    if (analyzer == null) {
+      throw new LukeException("Analyzer is not set.");
+    }
+    return analyzer;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/CustomAnalyzerConfig.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/CustomAnalyzerConfig.java b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/CustomAnalyzerConfig.java
new file mode 100644
index 0000000..5c32493
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/analysis/CustomAnalyzerConfig.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.analysis;
+
+import com.google.common.collect.ImmutableList;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+/**
+ * Configurations for a custom analyzer.
+ */
+public final class CustomAnalyzerConfig {
+
+  private final String configDir;
+
+  private final ComponentConfig tokenizerConfig;
+
+  private final List<ComponentConfig> charFilterConfigs;
+
+  private final List<ComponentConfig> tokenFilterConfigs;
+
+  public static class Builder {
+    private String configDir;
+    private final ComponentConfig tokenizerConfig;
+    private final List<ComponentConfig> charFilterConfigs = new ArrayList<>();
+    private final List<ComponentConfig> tokenFilterConfigs = new ArrayList<>();
+
+    public Builder(@Nonnull String tokenizerName, @Nonnull Map<String, String> tokenizerParams) {
+      tokenizerConfig = new ComponentConfig(tokenizerName, new HashMap<>(tokenizerParams));
+    }
+
+    public Builder configDir(String val) {
+      configDir = val;
+      return this;
+    }
+
+    public Builder addCharFilterConfig(@Nonnull String name, @Nonnull Map<String, String> params) {
+      charFilterConfigs.add(new ComponentConfig(name, new HashMap<>(params)));
+      return this;
+    }
+
+    public Builder addTokenFilterConfig(@Nonnull String name, @Nonnull Map<String, String> params) {
+      tokenFilterConfigs.add(new ComponentConfig(name, new HashMap<>(params)));
+      return this;
+    }
+
+    public CustomAnalyzerConfig build() {
+      return new CustomAnalyzerConfig(this);
+    }
+  }
+
+  private CustomAnalyzerConfig(Builder builder) {
+    this.tokenizerConfig = builder.tokenizerConfig;
+    this.configDir = builder.configDir;
+    this.charFilterConfigs = builder.charFilterConfigs;
+    this.tokenFilterConfigs = builder.tokenFilterConfigs;
+  }
+
+  /**
+   * Returns directory path for configuration files, or empty.
+   */
+  Optional<String> getConfigDir() {
+    return Optional.ofNullable(configDir);
+  }
+
+  /**
+   * Returns the Tokenizer configuration.
+   */
+  ComponentConfig getTokenizerConfig() {
+    return tokenizerConfig;
+  }
+
+  /**
+   * Returns the CharFilter configurations.
+   */
+  List<ComponentConfig> getCharFilterConfigs() {
+    return ImmutableList.copyOf(charFilterConfigs);
+  }
+
+  /**
+   * Returns the TokenFilter configurations.
+   */
+  List<ComponentConfig> getTokenFilterConfigs() {
+    return ImmutableList.copyOf(tokenFilterConfigs);
+  }
+
+  static class ComponentConfig {
+
+    private final String name;
+    private final Map<String, String> params;
+
+    ComponentConfig(@Nonnull String name, @Nonnull Map<String, String> params) {
+      this.name = name;
+      this.params = params;
+    }
+
+    String getName() {
+      return this.name;
+    }
+
+    Map<String, String> getParams() {
+      return this.params;
+    }
+  }
+}
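A sketch of assembling a configuration with the Builder. Because AnalysisImpl.buildCustomAnalyzer above resolves component names via Class.forName, fully qualified factory class names are used here; analysis refers to an Analysis instance as in the earlier sketch, and the empty maps stand in for component parameters:

    CustomAnalyzerConfig config = new CustomAnalyzerConfig.Builder(
            "org.apache.lucene.analysis.core.WhitespaceTokenizerFactory", new HashMap<>())
        .addTokenFilterConfig("org.apache.lucene.analysis.core.LowerCaseFilterFactory", new HashMap<>())
        .build();
    Analyzer analyzer = analysis.buildCustomAnalyzer(config);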

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commit.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commit.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commit.java
new file mode 100644
index 0000000..642b76d
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commit.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.luke.util.IndexUtils;
+
+import java.io.IOException;
+
+/**
+ * Holder for a commit.
+ */
+public final class Commit {
+
+  private long generation;
+
+  private boolean isDeleted;
+
+  private int segCount;
+
+  private String userData;
+
+  static Commit of(IndexCommit ic) {
+    Commit commit = new Commit();
+    commit.generation = ic.getGeneration();
+    commit.isDeleted = ic.isDeleted();
+    commit.segCount = ic.getSegmentCount();
+    try {
+      commit.userData = IndexUtils.getCommitUserData(ic);
+    } catch (IOException e) {
+      // leave userData unset if the commit user data cannot be read
+    }
+    return commit;
+  }
+
+  public long getGeneration() {
+    return generation;
+  }
+
+  public boolean isDeleted() {
+    return isDeleted;
+  }
+
+  public int getSegCount() {
+    return segCount;
+  }
+
+  public String getUserData() {
+    return userData;
+  }
+
+  private Commit() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commits.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commits.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commits.java
new file mode 100644
index 0000000..c927219
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Commits.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.luke.models.LukeException;
+import org.apache.lucene.store.Directory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+/**
+ * A dedicated interface for Luke's Commits tab.
+ */
+public interface Commits {
+
+  /**
+   * Returns commits that exist in this Directory.
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<Commit> listCommits();
+
+  /**
+   * Returns a commit of the specified generation.
+   * @param commitGen - generation
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Commit> getCommit(long commitGen);
+
+  /**
+   * Returns index files for the specified generation.
+   * @param commitGen - generation
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<File> getFiles(long commitGen);
+
+  /**
+   * Returns segments for the specified generation.
+   * @param commitGen - generation
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<Segment> getSegments(long commitGen);
+
+  /**
+   * Returns internal codec attributes map for the specified segment.
+   * @param commitGen - generation
+   * @param name - segment name
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Map<String, String> getSegmentAttributes(long commitGen, String name);
+
+  /**
+   * Returns diagnostics for the specified segment.
+   * @param commitGen - generation
+   * @param name - segment name
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Map<String, String> getSegmentDiagnostics(long commitGen, String name);
+
+  /**
+   * Returns codec for the specified segment.
+   * @param commitGen - generation
+   * @param name - segment name
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Codec> getSegmentCodec(long commitGen, String name);
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsFactory.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsFactory.java
new file mode 100644
index 0000000..a2ad1f3
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsFactory.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.store.Directory;
+
+public class CommitsFactory {
+
+  public Commits newInstance(Directory dir, String indexPath) {
+    return new CommitsImpl(dir, indexPath);
+  }
+
+  public Commits newInstance(DirectoryReader reader, String indexPath) {
+    return new CommitsImpl(reader, indexPath);
+  }
+
+}
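A sketch of listing commit points through the factory, assuming a DirectoryReader named reader and the corresponding index path string are already available:

    Commits commits = new CommitsFactory().newInstance(reader, indexPath);
    for (Commit commit : commits.listCommits()) {
      System.out.println("generation=" + commit.getGeneration() + ", segments=" + commit.getSegCount());
    }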

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsImpl.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsImpl.java
new file mode 100644
index 0000000..b4efb20
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/CommitsImpl.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.luke.models.LukeModel;
+import org.apache.lucene.luke.models.LukeException;
+import org.apache.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.UnaryOperator;
+import java.util.stream.Collectors;
+
+/**
+ * Default implementation of {@link Commits}.
+ */
+public final class CommitsImpl extends LukeModel implements Commits {
+
+  private static final Logger logger = LoggerFactory.getLogger(CommitsImpl.class);
+
+  private final String indexPath;
+
+  private final Map<Long, IndexCommit> commitMap;
+
+  /**
+   * Constructs a CommitsImpl that holds the given {@link Directory}.
+   *
+   * @param dir - the index directory
+   * @param indexPath - the path to index directory
+   */
+  public CommitsImpl(Directory dir, String indexPath) {
+    super(dir);
+    this.indexPath = indexPath;
+    this.commitMap = initCommitMap();
+  }
+
+  /**
+   * Constructs a CommitsImpl that holds the {@link Directory} wrapped in the given {@link DirectoryReader}.
+   *
+   * @param reader - the index reader
+   * @param indexPath - the path to index directory
+   */
+  public CommitsImpl(DirectoryReader reader, String indexPath) {
+    super(reader.directory());
+    this.indexPath = indexPath;
+    this.commitMap = initCommitMap();
+  }
+
+  private Map<Long, IndexCommit> initCommitMap() {
+    try {
+      List<IndexCommit> indexCommits = DirectoryReader.listCommits(dir);
+      return indexCommits.stream()
+          .collect(Collectors.toMap(IndexCommit::getGeneration, UnaryOperator.identity()));
+    } catch (IOException e) {
+      throw new LukeException("Failed to get commits list.", e);
+    }
+  }
+
+  @Override
+  public List<Commit> listCommits() throws LukeException {
+    List<Commit> commits = getCommitMap().values().stream()
+        .map(Commit::of)
+        .collect(Collectors.toList());
+    Collections.reverse(commits);
+    return commits;
+  }
+
+  @Override
+  public Optional<Commit> getCommit(long commitGen) throws LukeException {
+    IndexCommit ic = getCommitMap().get(commitGen);
+
+    if (ic == null) {
+      String msg = String.format("Commit generation %d not exists.", commitGen);
+      logger.warn(msg);
+      return Optional.empty();
+    }
+
+    return Optional.of(Commit.of(ic));
+  }
+
+  @Override
+  public List<File> getFiles(long commitGen) throws LukeException {
+    IndexCommit ic = getCommitMap().get(commitGen);
+
+    if (ic == null) {
+      String msg = String.format("Commit generation %d not exists.", commitGen);
+      logger.warn(msg);
+      return Collections.emptyList();
+    }
+
+    try {
+      return ic.getFileNames().stream()
+          .map(name -> File.of(indexPath, name))
+          .sorted(Comparator.comparing(File::getFileName))
+          .collect(Collectors.toList());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Failed to load files for commit generation %d", commitGen), e);
+    }
+  }
+
+  @Override
+  public List<Segment> getSegments(long commitGen) throws LukeException {
+    try {
+      SegmentInfos infos = findSegmentInfos(commitGen);
+      if (infos == null) {
+        return Collections.emptyList();
+      }
+
+      return infos.asList().stream()
+          .map(Segment::of)
+          .sorted(Comparator.comparing(Segment::getName))
+          .collect(Collectors.toList());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Failed to load segment infos for commit generation %d", commitGen), e);
+    }
+  }
+
+  @Override
+  public Map<String, String> getSegmentAttributes(long commitGen, String name) throws LukeException {
+    try {
+      SegmentInfos infos = findSegmentInfos(commitGen);
+      if (infos == null) {
+        return Collections.emptyMap();
+      }
+
+      return infos.asList().stream()
+          .filter(seg -> seg.info.name.equals(name))
+          .findAny()
+          .map(seg -> seg.info.getAttributes())
+          .orElse(Collections.emptyMap());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Failed to load segment infos for commit generation %d", commitGen), e);
+    }
+  }
+
+  @Override
+  public Map<String, String> getSegmentDiagnostics(long commitGen, String name) throws LukeException {
+    try {
+      SegmentInfos infos = findSegmentInfos(commitGen);
+      if (infos == null) {
+        return Collections.emptyMap();
+      }
+
+      return infos.asList().stream()
+          .filter(seg -> seg.info.name.equals(name))
+          .findAny()
+          .map(seg -> seg.info.getDiagnostics())
+          .orElse(Collections.emptyMap());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Failed to load segment infos for commit generation %d", commitGen), e);
+    }
+  }
+
+  @Override
+  public Optional<Codec> getSegmentCodec(long commitGen, String name) throws LukeException {
+    try {
+      SegmentInfos infos = findSegmentInfos(commitGen);
+      if (infos == null) {
+        return Optional.empty();
+      }
+
+      return infos.asList().stream()
+          .filter(seg -> seg.info.name.equals(name))
+          .findAny()
+          .map(seg -> seg.info.getCodec());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Failed to load segment infos for commit generation %d", commitGen), e);
+    }
+  }
+
+  private Map<Long, IndexCommit> getCommitMap() throws LukeException {
+    if (dir == null) {
+      return Collections.emptyMap();
+    }
+    return ImmutableMap.copyOf(commitMap);
+  }
+
+  private SegmentInfos findSegmentInfos(long commitGen) throws LukeException, IOException {
+    IndexCommit ic = getCommitMap().get(commitGen);
+    if (ic == null) {
+      return null;
+    }
+    String segmentFile = ic.getSegmentsFileName();
+    return SegmentInfos.readCommit(dir, segmentFile);
+  }
+
+  static String toDisplaySize(long size) {
+    if (size < 1024) {
+      return String.valueOf(size) + " B";
+    } else if (size < 1048576) {
+      return String.valueOf(size / 1024) + " KB";
+    } else {
+      return String.valueOf(size / 1048576) + " MB";
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/File.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/File.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/File.java
new file mode 100644
index 0000000..21b3dca
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/File.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+/**
+ * Holder for an index file.
+ */
+public final class File {
+  private String fileName;
+  private String displaySize;
+
+  static File of(String indexPath, String name) {
+    File file = new File();
+    file.fileName = name;
+    java.io.File fileObject = new java.io.File(indexPath, name);
+    file.displaySize = CommitsImpl.toDisplaySize(fileObject.length());
+    return file;
+  }
+
+  public String getFileName() {
+    return fileName;
+  }
+
+  public String getDisplaySize() {
+    return displaySize;
+  }
+
+  private File() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Segment.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Segment.java b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Segment.java
new file mode 100644
index 0000000..6d7c800
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/commits/Segment.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.commits;
+
+import org.apache.lucene.index.SegmentCommitInfo;
+
+import java.io.IOException;
+
+/**
+ * Holder for a segment.
+ */
+public final class Segment {
+
+  private String name;
+
+  private int maxDoc;
+
+  private long delGen;
+
+  private int delCount;
+
+  private String luceneVer;
+
+  private String codecName;
+
+  private String displaySize;
+
+  private boolean useCompoundFile;
+
+  static Segment of(SegmentCommitInfo segInfo) {
+    Segment segment = new Segment();
+    segment.name = segInfo.info.name;
+    segment.maxDoc = segInfo.info.maxDoc();
+    segment.delGen = segInfo.getDelGen();
+    segment.delCount = segInfo.getDelCount();
+    segment.luceneVer = segInfo.info.getVersion().toString();
+    segment.codecName = segInfo.info.getCodec().getName();
+    try {
+      segment.displaySize = CommitsImpl.toDisplaySize(segInfo.sizeInBytes());
+    } catch (IOException e) {
+      // leave displaySize unset if the segment size cannot be read
+    }
+    segment.useCompoundFile = segInfo.info.getUseCompoundFile();
+    return segment;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public int getMaxDoc() {
+    return maxDoc;
+  }
+
+  public long getDelGen() {
+    return delGen;
+  }
+
+  public int getDelCount() {
+    return delCount;
+  }
+
+  public String getLuceneVer() {
+    return luceneVer;
+  }
+
+  public String getCodecName() {
+    return codecName;
+  }
+
+  public String getDisplaySize() {
+    return displaySize;
+  }
+
+  public boolean isUseCompoundFile() {
+    return useCompoundFile;
+  }
+
+  private Segment() {
+  }
+}
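
A short sketch of where SegmentCommitInfo instances typically come from; only the Lucene calls are real, dir is an already-opened Directory, and the printed fields mirror what Segment.of captures.

    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.index.SegmentInfos;

    // Read the segment metadata of the most recent commit.
    SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
    for (SegmentCommitInfo segInfo : sis) {
      // Within this package, Segment.of(segInfo) wraps the same data for the UI.
      System.out.println(segInfo.info.name
          + " maxDoc=" + segInfo.info.maxDoc()
          + " delCount=" + segInfo.getDelCount()
          + " codec=" + segInfo.info.getCodec().getName());
    }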

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValues.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValues.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValues.java
new file mode 100644
index 0000000..1873958
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValues.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.util.BytesRef;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Holder for doc values.
+ */
+public final class DocValues {
+
+  private final DocValuesType dvType;
+
+  private final List<BytesRef> values;
+
+  private final List<Long> numericValues;
+
+  /**
+   * Returns a new doc values entry representing the specified doc values type and values.
+   * @param dvType - doc values type
+   * @param values - (string) values
+   * @param numericValues - numeric values
+   * @return doc values
+   */
+  static DocValues of(DocValuesType dvType, List<BytesRef> values, List<Long> numericValues) {
+    return new DocValues(dvType, values, numericValues);
+  }
+
+  private DocValues(DocValuesType dvType, List<BytesRef> values, List<Long> numericValues) {
+    this.dvType = dvType;
+    this.values = values;
+    this.numericValues = numericValues;
+  }
+
+  /**
+   * Returns the type of this doc values.
+   */
+  public DocValuesType getDvType() {
+    return dvType;
+  }
+
+  /**
+   * Returns the list of (string) values.
+   */
+  public List<BytesRef> getValues() {
+    return values;
+  }
+
+  /**
+   * Returns the list of numeric values.
+   */
+  public List<Long> getNumericValues() {
+    return numericValues;
+  }
+
+  @Override
+  public String toString() {
+    String numValuesStr = numericValues.stream().map(String::valueOf).collect(Collectors.joining(","));
+    return "DocValues{" +
+        "dvType=" + dvType +
+        ", values=" + values +
+        ", numericValues=[" + numValuesStr + "]" +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValuesAdapter.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValuesAdapter.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValuesAdapter.java
new file mode 100644
index 0000000..df83aee
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocValuesAdapter.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.luke.util.IndexUtils;
+import org.apache.lucene.util.BytesRef;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * A utility class for accessing doc values.
+ */
+final class DocValuesAdapter {
+
+  private final IndexReader reader;
+
+  DocValuesAdapter(@Nonnull IndexReader reader) {
+    this.reader = reader;
+  }
+
+  /**
+   * Returns the doc values for the specified field in the specified document.
+   * Empty Optional instance is returned if no doc values are available for the field.
+   *
+   * @param docid - document id
+   * @param field - field name
+   * @return doc values, if exists, or empty
+   * @throws IOException - if an I/O error occurs
+   */
+  Optional<DocValues> getDocValues(int docid, String field) throws IOException {
+    DocValuesType dvType = IndexUtils.getFieldInfo(reader, field).getDocValuesType();
+
+    switch (dvType) {
+      case BINARY:
+        return createBinaryDocValues(docid, field, DocValuesType.BINARY);
+      case NUMERIC:
+        return createNumericDocValues(docid, field, DocValuesType.NUMERIC);
+      case SORTED_NUMERIC:
+        return createSortedNumericDocValues(docid, field, DocValuesType.SORTED_NUMERIC);
+      case SORTED:
+        return createSortedDocValues(docid, field, DocValuesType.SORTED);
+      case SORTED_SET:
+        return createSortedSetDocValues(docid, field, DocValuesType.SORTED_SET);
+      default:
+        return Optional.empty();
+    }
+  }
+
+  private Optional<DocValues> createBinaryDocValues(int docid, String field, DocValuesType dvType)
+      throws IOException {
+    BinaryDocValues bvalues = IndexUtils.getBinaryDocValues(reader, field);
+
+    if (bvalues.advanceExact(docid)) {
+      DocValues dv = DocValues.of(
+          dvType,
+          Collections.singletonList(BytesRef.deepCopyOf(bvalues.binaryValue())),
+          Collections.emptyList());
+      return Optional.of(dv);
+    }
+
+    return Optional.empty();
+  }
+
+  private Optional<DocValues> createNumericDocValues(int docid, String field, DocValuesType dvType)
+      throws IOException {
+    NumericDocValues nvalues = IndexUtils.getNumericDocValues(reader, field);
+
+    if (nvalues.advanceExact(docid)) {
+      DocValues dv = DocValues.of(
+          dvType,
+          Collections.emptyList(),
+          Collections.singletonList(nvalues.longValue())
+      );
+      return Optional.of(dv);
+    }
+
+    return Optional.empty();
+  }
+
+  private Optional<DocValues> createSortedNumericDocValues(int docid, String field, DocValuesType dvType)
+      throws IOException {
+    SortedNumericDocValues snvalues = IndexUtils.getSortedNumericDocValues(reader, field);
+
+    if (snvalues.advanceExact(docid)) {
+      List<Long> numericValues = new ArrayList<>();
+
+      int dvCount = snvalues.docValueCount();
+      for (int i = 0; i < dvCount; i++) {
+        numericValues.add(snvalues.nextValue());
+      }
+
+      DocValues dv = DocValues.of(
+          dvType,
+          Collections.emptyList(),
+          numericValues
+      );
+      return Optional.of(dv);
+    }
+
+    return Optional.empty();
+  }
+
+  private Optional<DocValues> createSortedDocValues(int docid, String field, DocValuesType dvType)
+      throws IOException {
+    SortedDocValues svalues = IndexUtils.getSortedDocValues(reader, field);
+
+    if (svalues.advanceExact(docid)) {
+      DocValues dv = DocValues.of(
+          dvType,
+          Collections.singletonList(BytesRef.deepCopyOf(svalues.binaryValue())),
+          Collections.emptyList()
+      );
+      return Optional.of(dv);
+    }
+
+    return Optional.empty();
+  }
+
+  private Optional<DocValues> createSortedSetDocValues(int docid, String field, DocValuesType dvType)
+      throws IOException {
+    SortedSetDocValues ssvalues = IndexUtils.getSortedSetDocvalues(reader, field);
+
+    if (ssvalues.advanceExact(docid)) {
+      List<BytesRef> values = new ArrayList<>();
+
+      long ord;
+      while ((ord = ssvalues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+        values.add(BytesRef.deepCopyOf(ssvalues.lookupOrd(ord)));
+      }
+
+      DocValues dv = DocValues.of(
+          dvType,
+          values,
+          Collections.emptyList()
+      );
+      return Optional.of(dv);
+    }
+
+    return Optional.empty();
+  }
+}
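
The adapter above follows the Lucene 7 iterator-style doc values access pattern: position the per-field iterator on the target document with advanceExact and read values only if it returns true. A stripped-down illustration against MultiDocValues; reader, docid and the field names are placeholders.

    import org.apache.lucene.index.MultiDocValues;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.SortedSetDocValues;
    import org.apache.lucene.util.BytesRef;

    // Single-valued numeric doc values for one document.
    NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "popularity");
    if (ndv != null && ndv.advanceExact(docid)) {
      long value = ndv.longValue();  // valid only after advanceExact returned true
    }

    // Multi-valued string doc values: iterate ordinals until NO_MORE_ORDS.
    SortedSetDocValues ssdv = MultiDocValues.getSortedSetValues(reader, "tags");
    if (ssdv != null && ssdv.advanceExact(docid)) {
      long ord;
      while ((ord = ssdv.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
        BytesRef term = BytesRef.deepCopyOf(ssdv.lookupOrd(ord));
      }
    }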

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentField.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentField.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentField.java
new file mode 100644
index 0000000..5026513
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentField.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.util.BytesRef;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+
+/**
+ * Holder for a document field's information and data.
+ */
+public final class DocumentField {
+
+  // field name
+  private String name;
+
+  // index options
+  private IndexOptions idxOptions;
+  private boolean hasTermVectors;
+  private boolean hasPayloads;
+  private boolean hasNorms;
+  private long norm;
+
+  // stored value
+  private boolean isStored;
+  private String stringValue;
+  private BytesRef binaryValue;
+  private Number numericValue;
+
+  // doc values
+  private DocValuesType dvType;
+
+  // point values
+  private int pointDimensionCount;
+  private int pointNumBytes;
+
+  static DocumentField of(@Nonnull FieldInfo finfo, @Nonnull IndexReader reader, int docId)
+      throws IOException {
+    return of(finfo, null, reader, docId);
+  }
+
+  static DocumentField of(@Nonnull FieldInfo finfo, IndexableField field, @Nonnull IndexReader reader, int docId)
+      throws IOException {
+
+    DocumentField dfield = new DocumentField();
+
+    dfield.name = finfo.name;
+    dfield.idxOptions = finfo.getIndexOptions();
+    dfield.hasTermVectors = finfo.hasVectors();
+    dfield.hasPayloads = finfo.hasPayloads();
+    dfield.hasNorms = finfo.hasNorms();
+
+    if (finfo.hasNorms()) {
+      NumericDocValues norms = MultiDocValues.getNormValues(reader, finfo.name);
+      if (norms.advanceExact(docId)) {
+        dfield.norm = norms.longValue();
+      }
+    }
+
+    dfield.dvType = finfo.getDocValuesType();
+
+    dfield.pointDimensionCount = finfo.getPointDimensionCount();
+    dfield.pointNumBytes = finfo.getPointNumBytes();
+
+    if (field != null) {
+      dfield.isStored = field.fieldType().stored();
+      dfield.stringValue = field.stringValue();
+      if (field.binaryValue() != null) {
+        dfield.binaryValue = BytesRef.deepCopyOf(field.binaryValue());
+      }
+      dfield.numericValue = field.numericValue();
+    }
+
+    return dfield;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public IndexOptions getIdxOptions() {
+    return idxOptions;
+  }
+
+  public boolean hasTermVectors() {
+    return hasTermVectors;
+  }
+
+  public boolean hasPayloads() {
+    return hasPayloads;
+  }
+
+  public boolean hasNorms() {
+    return hasNorms;
+  }
+
+  public long getNorm() {
+    return norm;
+  }
+
+  public boolean isStored() {
+    return isStored;
+  }
+
+  public String getStringValue() {
+    return stringValue;
+  }
+
+  public BytesRef getBinaryValue() {
+    return binaryValue;
+  }
+
+  public Number getNumericValue() {
+    return numericValue;
+  }
+
+  public DocValuesType getDvType() {
+    return dvType;
+  }
+
+  public int getPointDimensionCount() {
+    return pointDimensionCount;
+  }
+
+  public int getPointNumBytes() {
+    return pointNumBytes;
+  }
+
+  @Override
+  public String toString() {
+    return "DocumentField{" +
+        "name='" + name + '\'' +
+        ", idxOptions=" + idxOptions +
+        ", hasTermVectors=" + hasTermVectors +
+        ", isStored=" + isStored +
+        ", dvType=" + dvType +
+        ", pointDimensionCount=" + pointDimensionCount +
+        '}';
+  }
+
+  private DocumentField() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/Documents.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/Documents.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/Documents.java
new file mode 100644
index 0000000..b597ea5
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/Documents.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.luke.models.LukeException;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * A dedicated interface for Luke's Documents tab.
+ */
+public interface Documents {
+
+  /**
+   * Returns one greater than the largest possible document number.
+   */
+  int getMaxDoc();
+
+  /**
+   * Returns field names in this index.
+   */
+  Collection<String> getFieldNames();
+
+  /**
+   * Returns true if the document with the specified <code>docid</code> is not deleted, otherwise false.
+   * @param docid - document id
+   */
+  boolean isLive(int docid);
+
+  /**
+   * Returns the list of field information and field data for the specified document.
+   *
+   * @param docid - document id
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<DocumentField> getDocumentFields(int docid);
+
+  /**
+   * Returns the current target field name.
+   */
+  String getCurrentField();
+
+  /**
+   * Returns the first indexed term in the specified field.
+   * Empty Optional instance is returned if no terms are available for the field.
+   *
+   * @param field - field name
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Term> firstTerm(String field);
+
+  /**
+   * Increments the terms iterator and returns the next indexed term for the target field.
+   * Empty Optional instance is returned if the terms iterator has not been positioned yet, or has been exhausted.
+   *
+   * @return next term, if exists, or empty
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Term> nextTerm();
+
+  /**
+   * Seeks to the specified term, if it exists, or to the next (ceiling) term. Returns the term that was found.
+   * Empty Optional instance is returned if the terms iterator has not been positioned yet, or has been exhausted.
+   *
+   * @param termText - term to seek
+   * @return found term, if exists, or empty
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Term> seekTerm(String termText);
+
+  /**
+   * Returns the first document id (posting) associated with the current term.
+   * Empty Optional instance is returned if the terms iterator has not been positioned yet, or the postings iterator has been exhausted.
+   *
+   * @return document id, if exists, or empty
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Integer> firstTermDoc();
+
+  /**
+   * Increments the postings iterator and returns the next document id (posting) for the current term.
+   * Empty Optional instance is returned if the terms iterator has not been positioned yet, or the postings iterator has been exhausted.
+   *
+   * @return document id, if exists, or empty
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Integer> nextTermDoc();
+
+  /**
+   * Returns the list of the position information for the current posting.
+   *
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<TermPosting> getTermPositions();
+
+  /**
+   * Returns the document frequency for the current term (the number of documents containing the current term).
+   * Empty Optional instance is returned if the terms iterator has not been positioned yet.
+   *
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<Integer> getDocFreq();
+
+  /**
+   * Returns the term vectors for the specified field in the specified document.
+   * If no term vector is available for the field, an empty list is returned.
+   *
+   * @param docid - document id
+   * @param field - field name
+   * @return list of term vector elements
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  List<TermVectorEntry> getTermVectors(int docid, String field);
+
+  /**
+   * Returns the doc values for the specified field in the specified document.
+   * Empty Optional instance is returned if no doc values are available for the field.
+   *
+   * @param docid - document id
+   * @param field - field name
+   * @return doc values, if exists, or empty
+   * @throws LukeException - if an internal error occurs when accessing index
+   */
+  Optional<DocValues> getDocValues(int docid, String field);
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsFactory.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsFactory.java
new file mode 100644
index 0000000..f3def43
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsFactory.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.index.IndexReader;
+
+public class DocumentsFactory {
+
+  public Documents newInstance(IndexReader reader) {
+    return new DocumentsImpl(reader);
+  }
+
+}
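
Taken together, the factory and the Documents interface are meant to be used roughly as in the sketch below; the index path and field name are placeholders, everything else comes from this patch or the core Lucene API.

    import java.nio.file.Paths;
    import java.util.Optional;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.store.FSDirectory;

    IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get("/path/to/index")));
    Documents documents = new DocumentsFactory().newInstance(reader);

    // Stored fields and per-field metadata of the first live document.
    if (documents.isLive(0)) {
      for (DocumentField f : documents.getDocumentFields(0)) {
        System.out.println(f.getName() + " = " + f.getStringValue());
      }
    }

    // Walk the term dictionary of a field, then the postings of the first term.
    Optional<Term> term = documents.firstTerm("contents");
    term.ifPresent(t -> System.out.println("first term: " + t.text()));
    Optional<Integer> doc = documents.firstTermDoc();
    doc.ifPresent(d -> System.out.println("first doc for term: " + d));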

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dc72c4a0/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
----------------------------------------------------------------------
diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
new file mode 100644
index 0000000..8db047c
--- /dev/null
+++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java
@@ -0,0 +1,340 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.luke.models.documents;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.luke.models.LukeModel;
+import org.apache.lucene.luke.models.LukeException;
+import org.apache.lucene.luke.util.BytesRefUtils;
+import org.apache.lucene.luke.util.IndexUtils;
+import org.apache.lucene.util.BytesRef;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+public final class DocumentsImpl extends LukeModel implements Documents {
+
+  private static final Logger logger = LoggerFactory.getLogger(DocumentsImpl.class);
+
+  private final TermVectorsAdapter tvAdapter;
+
+  private final DocValuesAdapter dvAdapter;
+
+  private String curField;
+
+  private TermsEnum tenum;
+
+  private PostingsEnum penum;
+
+  /**
+   * Constructs a DocumentsImpl that holds the given {@link IndexReader}.
+   * @param reader - the index reader
+   */
+  public DocumentsImpl(@Nonnull IndexReader reader) {
+    super(reader);
+    this.tvAdapter = new TermVectorsAdapter(reader);
+    this.dvAdapter = new DocValuesAdapter(reader);
+  }
+
+  @Override
+  public int getMaxDoc() {
+    return reader.maxDoc();
+  }
+
+  @Override
+  public boolean isLive(int docid) {
+    return liveDocs == null || liveDocs.get(docid);
+  }
+
+  @Override
+  public List<DocumentField> getDocumentFields(int docid) {
+    if (!isLive(docid)) {
+      logger.info("Doc #{} was deleted", docid);
+      return Collections.emptyList();
+    }
+
+    List<DocumentField> res = new ArrayList<>();
+
+    try {
+      Document doc = reader.document(docid);
+
+      for (FieldInfo finfo : IndexUtils.getFieldInfos(reader)) {
+        // iterate all fields for this document
+        IndexableField[] fields = doc.getFields(finfo.name);
+        if (fields.length == 0) {
+          // no stored data is available
+          res.add(DocumentField.of(finfo, reader, docid));
+        } else {
+          for (IndexableField field : fields) {
+            res.add(DocumentField.of(finfo, field, reader, docid));
+          }
+        }
+      }
+
+    } catch (IOException e) {
+      throw new LukeException(String.format("Fields information not available for doc %d.", docid), e);
+    }
+
+    return res;
+  }
+
+  @Override
+  public String getCurrentField() {
+    return curField;
+  }
+
+  @Override
+  public Optional<Term> firstTerm(@Nonnull String field) {
+    try {
+      Terms terms = IndexUtils.getTerms(reader, field);
+
+      if (terms == null) {
+        // no such field?
+        resetCurrentField();
+        resetTermsIterator();
+        logger.warn("Terms not available for field: {}.", field);
+        return Optional.empty();
+      } else {
+        setCurrentField(field);
+        setTermsIterator(terms.iterator());
+
+        if (tenum.next() == null) {
+          // no term available for this field
+          resetTermsIterator();
+          logger.warn("No term available for field: {}.", field);
+          return Optional.empty();
+        } else {
+          return Optional.of(new Term(curField, tenum.term()));
+        }
+      }
+
+    } catch (IOException e) {
+      resetTermsIterator();
+      throw new LukeException(String.format("Terms not available for field: %s.", field), e);
+    } finally {
+      // discard current postings enum
+      resetPostingsIterator();
+    }
+  }
+
+  @Override
+  public Optional<Term> nextTerm() {
+    if (tenum == null) {
+      // terms enum not initialized
+      logger.warn("Terms enum un-positioned.");
+      return Optional.empty();
+    }
+
+    try {
+      if (tenum.next() == null) {
+        // end of the iterator
+        resetTermsIterator();
+        logger.info("Reached the end of the term iterator for field: {}.", curField);
+        return Optional.empty();
+
+      } else {
+        return Optional.of(new Term(curField, tenum.term()));
+      }
+    } catch (IOException e) {
+      resetTermsIterator();
+      throw new LukeException(String.format("Terms not available for field: %s.", curField), e);
+    } finally {
+      // discard current postings enum
+      resetPostingsIterator();
+    }
+  }
+
+  @Override
+  public Optional<Term> seekTerm(@Nonnull String termText) {
+    if (curField == null) {
+      // field is not selected
+      logger.warn("Field not selected.");
+      return Optional.empty();
+    }
+
+    try {
+      Terms terms = IndexUtils.getTerms(reader, curField);
+      setTermsIterator(terms.iterator());
+
+      if (tenum.seekCeil(new BytesRef(termText)) == TermsEnum.SeekStatus.END) {
+        // reached the end of the iterator
+        resetTermsIterator();
+        logger.info("Reached the end of the term iterator for field: {}.", curField);
+        return Optional.empty();
+      } else {
+        return Optional.of(new Term(curField, tenum.term()));
+      }
+    } catch (IOException e) {
+      resetTermsIterator();
+      throw new LukeException(String.format("Terms not available for field: %s.", curField), e);
+    } finally {
+      // discard current postings enum
+      resetPostingsIterator();
+    }
+  }
+
+  @Override
+  public Optional<Integer> firstTermDoc() {
+    if (tenum == null) {
+      // terms enum is not set
+      logger.warn("Terms enum un-positioned.");
+      return Optional.empty();
+    }
+
+    try {
+      setPostingsIterator(tenum.postings(penum, PostingsEnum.ALL));
+
+      if (penum.nextDoc() == PostingsEnum.NO_MORE_DOCS) {
+        // no docs available for this term
+        resetPostingsIterator();
+        logger.warn("No docs available for term: {} in field: {}.", BytesRefUtils.decode(tenum.term()), curField);
+        return Optional.empty();
+      } else {
+        return Optional.of(penum.docID());
+      }
+    } catch (IOException e) {
+      resetPostingsIterator();
+      throw new LukeException(String.format("Term docs not available for field: %s.", curField), e);
+    }
+  }
+
+  @Override
+  public Optional<Integer> nextTermDoc() {
+    if (penum == null) {
+      // postings enum is not initialized
+      logger.warn("Postings enum un-positioned for field: {}.", curField);
+      return Optional.empty();
+    }
+
+    try {
+      if (penum.nextDoc() == PostingsEnum.NO_MORE_DOCS) {
+        // end of the iterator
+        resetPostingsIterator();
+        logger.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField);
+        return Optional.empty();
+      } else {
+        return Optional.of(penum.docID());
+      }
+    } catch (IOException e) {
+      resetPostingsIterator();
+      throw new LukeException(String.format("Term docs not available for field: %s.", curField), e);
+    }
+  }
+
+  @Override
+  public List<TermPosting> getTermPositions() {
+    if (penum == null) {
+      // postings enum is not initialized
+      logger.warn("Postings enum un-positioned for field: {}.", curField);
+      return Collections.emptyList();
+    }
+
+    List<TermPosting> res = new ArrayList<>();
+
+    try {
+      int freq = penum.freq();
+
+      for (int i = 0; i < freq; i++) {
+        int position = penum.nextPosition();
+        if (position < 0) {
+          // no position information available
+          continue;
+        }
+        TermPosting posting = TermPosting.of(position, penum);
+        res.add(posting);
+      }
+
+    } catch (IOException e) {
+      throw new LukeException(String.format("Postings not available for field %s.", curField), e);
+    }
+
+    return res;
+  }
+
+  @Override
+  public Optional<Integer> getDocFreq() {
+    if (tenum == null) {
+      // terms enum is not initialized
+      logger.warn("Terms enum un-positioned for field: {}.", curField);
+      return Optional.empty();
+    }
+
+    try {
+      return Optional.of(tenum.docFreq());
+    } catch (IOException e) {
+      throw new LukeException(String.format("Doc frequency not available for field: %s.", curField), e);
+    }
+  }
+
+  @Override
+  public List<TermVectorEntry> getTermVectors(int docid, String field) {
+    try {
+      return tvAdapter.getTermVector(docid, field);
+    } catch (IOException e) {
+      throw new LukeException(String.format("Term vector not available for doc: #%d and field: %s", docid, field), e);
+    }
+  }
+
+  @Override
+  public Optional<DocValues> getDocValues(int docid, String field) {
+    try {
+      return dvAdapter.getDocValues(docid, field);
+    } catch (IOException e) {
+      throw new LukeException(String.format("Doc values not available for doc: #%d and field: %s", docid, field), e);
+    }
+  }
+
+  private void resetCurrentField() {
+    this.curField = null;
+  }
+
+  private void setCurrentField(String field) {
+    this.curField = field;
+  }
+
+  private void resetTermsIterator() {
+    this.tenum = null;
+  }
+
+  private void setTermsIterator(TermsEnum tenum) {
+    this.tenum = tenum;
+  }
+
+  private void resetPostingsIterator() {
+    this.penum = null;
+  }
+
+  private void setPostingsIterator(PostingsEnum penum) {
+    this.penum = penum;
+  }
+
+}
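
DocumentsImpl keeps a single TermsEnum and PostingsEnum as cursor state; the Lucene calls it defers to look roughly like the fragment below. The field and term text are placeholders; the -1 position check mirrors getTermPositions() above.

    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.index.PostingsEnum;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    Terms terms = MultiFields.getTerms(reader, "contents");
    if (terms != null) {
      TermsEnum te = terms.iterator();
      if (te.seekCeil(new BytesRef("lucene")) != TermsEnum.SeekStatus.END) {
        // Pass the previous PostingsEnum (or null) so Lucene can reuse it.
        PostingsEnum pe = te.postings(null, PostingsEnum.ALL);
        while (pe.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
          int freq = pe.freq();
          for (int i = 0; i < freq; i++) {
            int pos = pe.nextPosition();  // -1 when positions were not indexed
          }
        }
      }
    }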

