zeppelin-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From m...@apache.org
Subject [14/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.
Date Mon, 06 Apr 2015 04:06:05 GMT
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
new file mode 100644
index 0000000..8a36f37
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.aether.AbstractRepositoryListener;
+import org.sonatype.aether.RepositoryEvent;
+
+/**
+ * Simple listener that print log.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class RepositoryListener extends AbstractRepositoryListener {
+  Logger logger = LoggerFactory.getLogger(RepositoryListener.class);
+
+  public RepositoryListener() {}
+
+  @Override
+  public void artifactDeployed(RepositoryEvent event) {
+    logger.info("Deployed " + event.getArtifact() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDeploying(RepositoryEvent event) {
+    logger.info("Deploying " + event.getArtifact() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDescriptorInvalid(RepositoryEvent event) {
+    logger.info("Invalid artifact descriptor for " + event.getArtifact() + ": "
+                                                   + event.getException().getMessage());
+  }
+
+  @Override
+  public void artifactDescriptorMissing(RepositoryEvent event) {
+    logger.info("Missing artifact descriptor for " + event.getArtifact());
+  }
+
+  @Override
+  public void artifactInstalled(RepositoryEvent event) {
+    logger.info("Installed " + event.getArtifact() + " to " + event.getFile());
+  }
+
+  @Override
+  public void artifactInstalling(RepositoryEvent event) {
+    logger.info("Installing " + event.getArtifact() + " to " + event.getFile());
+  }
+
+  @Override
+  public void artifactResolved(RepositoryEvent event) {
+    logger.info("Resolved artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDownloading(RepositoryEvent event) {
+    logger.info("Downloading artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDownloaded(RepositoryEvent event) {
+    logger.info("Downloaded artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactResolving(RepositoryEvent event) {
+    logger.info("Resolving artifact " + event.getArtifact());
+  }
+
+  @Override
+  public void metadataDeployed(RepositoryEvent event) {
+    logger.info("Deployed " + event.getMetadata() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void metadataDeploying(RepositoryEvent event) {
+    logger.info("Deploying " + event.getMetadata() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void metadataInstalled(RepositoryEvent event) {
+    logger.info("Installed " + event.getMetadata() + " to " + event.getFile());
+  }
+
+  @Override
+  public void metadataInstalling(RepositoryEvent event) {
+    logger.info("Installing " + event.getMetadata() + " to " + event.getFile());
+  }
+
+  @Override
+  public void metadataInvalid(RepositoryEvent event) {
+    logger.info("Invalid metadata " + event.getMetadata());
+  }
+
+  @Override
+  public void metadataResolved(RepositoryEvent event) {
+    logger.info("Resolved metadata " + event.getMetadata() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void metadataResolving(RepositoryEvent event) {
+    logger.info("Resolving metadata " + event.getMetadata() + " from " + event.getRepository());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
new file mode 100644
index 0000000..00fac7f
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import org.apache.maven.repository.internal.DefaultServiceLocator;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.providers.http.HttpWagon;
+import org.apache.maven.wagon.providers.http.LightweightHttpWagon;
+import org.sonatype.aether.RepositorySystem;
+import org.sonatype.aether.connector.file.FileRepositoryConnectorFactory;
+import org.sonatype.aether.connector.wagon.WagonProvider;
+import org.sonatype.aether.connector.wagon.WagonRepositoryConnectorFactory;
+import org.sonatype.aether.spi.connector.RepositoryConnectorFactory;
+
+/**
+ * Get maven repository instance.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class RepositorySystemFactory {
+  public static RepositorySystem newRepositorySystem() {
+    DefaultServiceLocator locator = new DefaultServiceLocator();
+    locator.addService(RepositoryConnectorFactory.class, FileRepositoryConnectorFactory.class);
+    locator.addService(RepositoryConnectorFactory.class, WagonRepositoryConnectorFactory.class);
+    locator.setServices(WagonProvider.class, new ManualWagonProvider());
+
+    return locator.getService(RepositorySystem.class);
+  }
+
+  /**
+   * ManualWagonProvider
+   */
+  public static class ManualWagonProvider implements WagonProvider {
+
+    @Override
+    public Wagon lookup(String roleHint) throws Exception {
+      if ("http".equals(roleHint)) {
+        return new LightweightHttpWagon();
+      }
+
+      if ("https".equals(roleHint)) {
+        return new HttpWagon();
+      }
+
+      return null;
+    }
+
+    @Override
+    public void release(Wagon arg0) {
+
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
new file mode 100644
index 0000000..30daec9
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.io.PrintStream;
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.aether.transfer.AbstractTransferListener;
+import org.sonatype.aether.transfer.TransferEvent;
+import org.sonatype.aether.transfer.TransferResource;
+
+/**
+ * Simple listener that show deps downloading progress.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class TransferListener extends AbstractTransferListener {
+  Logger logger = LoggerFactory.getLogger(TransferListener.class);
+  private PrintStream out;
+
+  private Map<TransferResource, Long> downloads = new ConcurrentHashMap<TransferResource, Long>();
+
+  private int lastLength;
+
+  public TransferListener() {}
+
+  @Override
+  public void transferInitiated(TransferEvent event) {
+    String message =
+        event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploading" : "Downloading";
+
+    logger.info(message + ": " + event.getResource().getRepositoryUrl()
+                + event.getResource().getResourceName());
+  }
+
+  @Override
+  public void transferProgressed(TransferEvent event) {
+    TransferResource resource = event.getResource();
+    downloads.put(resource, Long.valueOf(event.getTransferredBytes()));
+
+    StringBuilder buffer = new StringBuilder(64);
+
+    for (Map.Entry<TransferResource, Long> entry : downloads.entrySet()) {
+      long total = entry.getKey().getContentLength();
+      long complete = entry.getValue().longValue();
+
+      buffer.append(getStatus(complete, total)).append("  ");
+    }
+
+    int pad = lastLength - buffer.length();
+    lastLength = buffer.length();
+    pad(buffer, pad);
+    buffer.append('\r');
+
+    logger.info(buffer.toString());
+  }
+
+  private String getStatus(long complete, long total) {
+    if (total >= 1024) {
+      return toKB(complete) + "/" + toKB(total) + " KB ";
+    } else if (total >= 0) {
+      return complete + "/" + total + " B ";
+    } else if (complete >= 1024) {
+      return toKB(complete) + " KB ";
+    } else {
+      return complete + " B ";
+    }
+  }
+
+  private void pad(StringBuilder buffer, int spaces) {
+    String block = "                                        ";
+    while (spaces > 0) {
+      int n = Math.min(spaces, block.length());
+      buffer.append(block, 0, n);
+      spaces -= n;
+    }
+  }
+
+  @Override
+  public void transferSucceeded(TransferEvent event) {
+    transferCompleted(event);
+
+    TransferResource resource = event.getResource();
+    long contentLength = event.getTransferredBytes();
+    if (contentLength >= 0) {
+      String type =
+          (event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploaded" : "Downloaded");
+      String len = contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B";
+
+      String throughput = "";
+      long duration = System.currentTimeMillis() - resource.getTransferStartTime();
+      if (duration > 0) {
+        DecimalFormat format = new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH));
+        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
+        throughput = " at " + format.format(kbPerSec) + " KB/sec";
+      }
+
+      logger.info(type + ": " + resource.getRepositoryUrl() + resource.getResourceName() + " ("
+          + len + throughput + ")");
+    }
+  }
+
+  @Override
+  public void transferFailed(TransferEvent event) {
+    transferCompleted(event);
+    event.getException().printStackTrace(out);
+  }
+
+  private void transferCompleted(TransferEvent event) {
+    downloads.remove(event.getResource());
+    StringBuilder buffer = new StringBuilder(64);
+    pad(buffer, lastLength);
+    buffer.append('\r');
+    logger.info(buffer.toString());
+  }
+
+  @Override
+  public void transferCorrupted(TransferEvent event) {
+    event.getException().printStackTrace(out);
+  }
+
+  protected long toKB(long bytes) {
+    return (bytes + 1023) / 1024;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/resources/python/zeppelin_pyspark.py
----------------------------------------------------------------------
diff --git a/spark/src/main/resources/python/zeppelin_pyspark.py b/spark/src/main/resources/python/zeppelin_pyspark.py
index 92baf58..5b70d85 100644
--- a/spark/src/main/resources/python/zeppelin_pyspark.py
+++ b/spark/src/main/resources/python/zeppelin_pyspark.py
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 import sys, getopt
 
 from py4j.java_gateway import java_import, JavaGateway, GatewayClient

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
deleted file mode 100644
index 8c63c10..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter._
-import org.apache.spark.repl.SparkILoop
-import org.apache.spark.repl.SparkIMain
-import org.apache.spark.util.Utils
-import java.io.BufferedReader
-import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-
-
-class ReflectSparkILoop(in0: Option[BufferedReader], override protected val out: JPrintWriter, override val master: Option[String])
-	  extends SparkILoop(in0, out, master) {
-  def this(in0: BufferedReader, out: JPrintWriter, master: String) = this(Some(in0), out, Some(master))
-  def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
-  def this() = this(None, new JPrintWriter(Console.out, true), None)  
-  
-
-  class ReflectSparkILoopInterpreter extends ReflectSparkIMain(settings, out) {
-    outer =>
-
-    override lazy val formatting = new Formatting {
-      def prompt = ReflectSparkILoop.this.prompt
-    }
-    override protected def parentClassLoader = SparkHelper.explicitParentLoader(settings).getOrElse(classOf[SparkILoop].getClassLoader)
-  }
-
-  /** Create a new interpreter. */
-  override def createInterpreter() {
-    require(settings != null)
-
-    if (addedClasspath != "") settings.classpath.append(addedClasspath)
-    // work around for Scala bug
-    val totalClassPath = SparkILoop.getAddedJars.foldLeft(
-      settings.classpath.value)((l, r) => ClassPath.join(l, r))
-    this.settings.classpath.value = totalClassPath
-
-    intp = new ReflectSparkILoopInterpreter
-  }
-  
-  /** Create a new interpreter. */
-  def createReflectInterpreter(settings : Settings) : SparkIMain = {
-    require(settings != null)
-
-    if (addedClasspath != "") settings.classpath.append(addedClasspath)
-    // work around for Scala bug
-    val totalClassPath = SparkILoop.getAddedJars.foldLeft(
-      settings.classpath.value)((l, r) => ClassPath.join(l, r))
-    this.settings.classpath.value = totalClassPath
-
-    intp = new ReflectSparkILoopInterpreter
-    intp
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
deleted file mode 100644
index 0ad3696..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter._
-import reporters._
-import org.apache.spark.repl.SparkIMain
-import scala.tools.reflect._
-class ReflectSparkIMain(initialSettings: Settings, override val out: JPrintWriter) extends SparkIMain(initialSettings, out) {
-	
-  override def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
-    settings.outputDirs setSingleOutput virtualDirectory
-    settings.exposeEmptyPackage.value = true
-    new ReflectGlobal(settings, reporter, classLoader) with ReplGlobal {
-      override def toString: String = "<global>"
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
deleted file mode 100644
index 9f1d3b4..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import com.nflabs.zeppelin.repl.Repl
-import com.nflabs.zeppelin.repl.ReplResult
-import com.nflabs.zeppelin.repl.ReplResult.Code
-import com.nflabs.zeppelin.repl.Repl.FormType
-import java.util.Properties
-import scala.tools.nsc.{Interpreter, Settings}
-import java.io.ByteArrayOutputStream
-import java.io.PrintStream
-import java.io.PrintWriter
-import org.apache.spark.repl.SparkILoop
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.SparkContext
-
-class SparkRepl(properties: Properties) extends Repl(properties) {
-  
-  val out = new ByteArrayOutputStream(); 
-  var interpreter : ReflectSparkILoop = _
-  var intp : SparkIMain = _
-  
-  protected def getSparkContext() : SparkContext = {
-    null
-  }
-  
-  override def initialize() = {
-    val cl = Thread.currentThread().getContextClassLoader();
-    
-    val settings = new Settings();
-    settings.usejavacp.value = true
-
-    val printStream = new PrintStream(out)
-    interpreter = new ReflectSparkILoop(null, new PrintWriter(out))
-    interpreter.settings = settings;
-    intp = interpreter.createReflectInterpreter(settings);
-    interpreter.intp = intp
-    intp.initializeSynchronous
-    
-    
-  }
-  override def destroy() = {
-	intp.close()	
-  }
-  override def getValue(name : String) : Object = {
-
-    return null;
-  }
-  override def interpret(st : String) : ReplResult = {
-    return null;
-  }
-	
-  override def cancel() = {
-	  
-  }
-  override def bindValue(name : String, o : Object) = {
-	  
-  }
-  override def getFormType() : FormType = {
-    return FormType.NATIVE;
-  }
-  
-  def getResultCode(r : scala.tools.nsc.interpreter.Results.Result) : Code = {
-
-    if (r.isInstanceOf[scala.tools.nsc.interpreter.Results.Success$]) {
-      return Code.SUCCESS;
-    } else if (r.isInstanceOf[scala.tools.nsc.interpreter.Results.Incomplete$]) {
-      return Code.INCOMPLETE;
-    } else {
-      return Code.ERROR;
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
deleted file mode 100644
index 7fe8aae..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-
-public class DepInterpreterTest {
-  private DepInterpreter dep;
-  private InterpreterContext context;
-  private File tmpDir;
-  private SparkInterpreter repl;
-
-  @Before
-  public void setUp() throws Exception {
-    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
-    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
-
-    tmpDir.mkdirs();
-
-    Properties p = new Properties();
-
-    dep = new DepInterpreter(p);
-    dep.open();
-
-    InterpreterGroup intpGroup = new InterpreterGroup();
-    intpGroup.add(new SparkInterpreter(p));
-    intpGroup.add(dep);
-    dep.setInterpreterGroup(intpGroup);
-
-    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    dep.close();
-    delete(tmpDir);
-  }
-
-  private void delete(File file) {
-    if (file.isFile()) file.delete();
-    else if (file.isDirectory()) {
-      File[] files = file.listFiles();
-      if (files != null && files.length > 0) {
-        for (File f : files) {
-          delete(f);
-        }
-      }
-      file.delete();
-    }
-  }
-
-  @Test
-  public void testDefault() {
-    dep.getDependencyContext().reset();
-    InterpreterResult ret = dep.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
-    assertEquals(Code.SUCCESS, ret.code());
-
-    assertEquals(1, dep.getDependencyContext().getFiles().size());
-    assertEquals(1, dep.getDependencyContext().getFilesDist().size());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
deleted file mode 100644
index ae9fb73..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class SparkInterpreterTest {
-  public static SparkInterpreter repl;
-  private InterpreterContext context;
-  private File tmpDir;
-
-  @Before
-  public void setUp() throws Exception {
-    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
-    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
-
-    tmpDir.mkdirs();
-
-	  if (repl == null) {
-		  Properties p = new Properties();
-
-	    repl = new SparkInterpreter(p);
-  	  repl.open();
-	  }
-
-    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-	}
-
-  @After
-  public void tearDown() throws Exception {
-    delete(tmpDir);
-  }
-
-  private void delete(File file) {
-    if (file.isFile()) file.delete();
-    else if (file.isDirectory()) {
-      File[] files = file.listFiles();
-      if (files != null && files.length > 0) {
-        for (File f : files) {
-          delete(f);
-        }
-      }
-      file.delete();
-    }
-  }
-
-	@Test
-	public void testBasicIntp() {
-		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val a = 1\nval b = 2", context).code());
-
-		// when interpret incomplete expression
-		InterpreterResult incomplete = repl.interpret("val a = \"\"\"", context);
-		assertEquals(InterpreterResult.Code.INCOMPLETE, incomplete.code());
-		assertTrue(incomplete.message().length()>0); // expecting some error message
-		/*
-		assertEquals(1, repl.getValue("a"));
-		assertEquals(2, repl.getValue("b"));
-		repl.interpret("val ver = sc.version");
-		assertNotNull(repl.getValue("ver"));
-		assertEquals("HELLO\n", repl.interpret("println(\"HELLO\")").message());
-		*/
-	}
-
-	@Test
-	public void testEndWithComment() {
-		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val c=1\n//comment", context).code());
-	}
-
-	@Test
-	public void testSparkSql(){
-		repl.interpret("case class Person(name:String, age:Int)\n", context);
-		repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", context);
-		assertEquals(Code.SUCCESS, repl.interpret("people.take(3)", context).code());
-
-		// create new interpreter
-		Properties p = new Properties();
-		SparkInterpreter repl2 = new SparkInterpreter(p);
-		repl2.open();
-
-		repl.interpret("case class Man(name:String, age:Int)", context);
-		repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
-		assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", context).code());
-		repl2.getSparkContext().stop();
-	}
-
-	@Test
-	public void testReferencingUndefinedVal(){
-		InterpreterResult result = repl.interpret("def category(min: Int) = {" +
-				       "    if (0 <= value) \"error\"" +
-                       "}", context);
-		assertEquals(Code.ERROR, result.code());
-	}
-
-  @Test
-  public void testZContextDependencyLoading() {
-    // try to import library does not exist on classpath. it'll fail
-    assertEquals(InterpreterResult.Code.ERROR, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
-
-    // load library from maven repository and try to import again
-    repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
-    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
deleted file mode 100644
index 9eba227..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Type;
-
-public class SparkSqlInterpreterTest {
-
-	private SparkSqlInterpreter sql;
-  private SparkInterpreter repl;
-  private InterpreterContext context;
-
-	@Before
-	public void setUp() throws Exception {
-		Properties p = new Properties();
-
-		if (repl == null) {
-
-		  if (SparkInterpreterTest.repl == null) {
-		    repl = new SparkInterpreter(p);
-		    repl.open();
-		    SparkInterpreterTest.repl = repl;
-		  } else {
-		    repl = SparkInterpreterTest.repl;
-		  }
-
-  		sql = new SparkSqlInterpreter(p);
-
-  		InterpreterGroup intpGroup = new InterpreterGroup();
-		  intpGroup.add(repl);
-		  intpGroup.add(sql);
-		  sql.setInterpreterGroup(intpGroup);
-		  sql.open();
-		}
-		context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-	}
-
-	@After
-	public void tearDown() throws Exception {
-	}
-
-	@Test
-	public void test() {
-		repl.interpret("case class Test(name:String, age:Int)", context);
-		repl.interpret("val test = sc.parallelize(Seq(Test(\"moon\", 33), Test(\"jobs\", 51), Test(\"gates\", 51), Test(\"park\", 34)))", context);
-		repl.interpret("test.registerAsTable(\"test\")", context);
-
-		InterpreterResult ret = sql.interpret("select name, age from test where age < 40", context);
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-		assertEquals(Type.TABLE, ret.type());
-		assertEquals("name\tage\nmoon\t33\npark\t34\n", ret.message());
-
-		assertEquals(InterpreterResult.Code.ERROR, sql.interpret("select wrong syntax", context).code());
-		assertEquals(InterpreterResult.Code.SUCCESS, sql.interpret("select case when name==\"aa\" then name else name end from people", context).code());
-	}
-
-	@Test
-	public void testStruct(){
-		repl.interpret("case class Person(name:String, age:Int)", context);
-		repl.interpret("case class People(group:String, person:Person)", context);
-		repl.interpret("val gr = sc.parallelize(Seq(People(\"g1\", Person(\"moon\",33)), People(\"g2\", Person(\"sun\",11))))", context);
-		repl.interpret("gr.registerAsTable(\"gr\")", context);
-		InterpreterResult ret = sql.interpret("select * from gr", context);
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-	}
-
-	@Test
-	public void test_null_value_in_row() {
-		repl.interpret("import org.apache.spark.sql._", context);
-		repl.interpret("def toInt(s:String): Any = {try { s.trim().toInt} catch {case e:Exception => null}}", context);
-		repl.interpret("val schema = StructType(Seq(StructField(\"name\", StringType, false),StructField(\"age\" , IntegerType, true),StructField(\"other\" , StringType, false)))", context);
-		repl.interpret("val csv = sc.parallelize(Seq((\"jobs, 51, apple\"), (\"gates, , microsoft\")))", context);
-		repl.interpret("val raw = csv.map(_.split(\",\")).map(p => Row(p(0),toInt(p(1)),p(2)))", context);
-		repl.interpret("val people = z.sqlContext.applySchema(raw, schema)", context);
-		repl.interpret("people.registerTempTable(\"people\")", context);
-
-		InterpreterResult ret = sql.interpret("select name, age from people where name = 'gates'", context);
-		System.err.println("RET=" + ret.message());
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-		assertEquals(Type.TABLE, ret.type());
-		assertEquals("name\tage\ngates\tnull\n", ret.message());
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
deleted file mode 100644
index 804c31e..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import static org.junit.Assert.assertEquals;
-
-import org.junit.Test;
-
-public class DependencyResolverTest {
-
-  @Test
-  public void testInferScalaVersion() {
-    String [] version = scala.util.Properties.versionNumberString().split("[.]");
-    String scalaVersion = version[0] + "." + version[1];
-
-    assertEquals("groupId:artifactId:version",
-        DependencyResolver.inferScalaVersion("groupId:artifactId:version"));
-    assertEquals("groupId:artifactId_" + scalaVersion + ":version",
-        DependencyResolver.inferScalaVersion("groupId::artifactId:version"));
-    assertEquals("groupId:artifactId:version::test",
-        DependencyResolver.inferScalaVersion("groupId:artifactId:version::test"));
-    assertEquals("*",
-        DependencyResolver.inferScalaVersion("*"));
-    assertEquals("groupId:*",
-        DependencyResolver.inferScalaVersion("groupId:*"));
-    assertEquals("groupId:artifactId*",
-        DependencyResolver.inferScalaVersion("groupId:artifactId*"));
-    assertEquals("groupId:artifactId_" + scalaVersion,
-        DependencyResolver.inferScalaVersion("groupId::artifactId"));
-    assertEquals("groupId:artifactId_" + scalaVersion + "*",
-        DependencyResolver.inferScalaVersion("groupId::artifactId*"));
-    assertEquals("groupId:artifactId_" + scalaVersion + ":*",
-        DependencyResolver.inferScalaVersion("groupId::artifactId:*"));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
new file mode 100644
index 0000000..8d24cc4
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.spark.DepInterpreter;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DepInterpreterTest {
+  private DepInterpreter dep;
+  private InterpreterContext context;
+  private File tmpDir;
+  private SparkInterpreter repl;
+
+  @Before
+  public void setUp() throws Exception {
+    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
+    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
+
+    tmpDir.mkdirs();
+
+    Properties p = new Properties();
+
+    dep = new DepInterpreter(p);
+    dep.open();
+
+    InterpreterGroup intpGroup = new InterpreterGroup();
+    intpGroup.add(new SparkInterpreter(p));
+    intpGroup.add(dep);
+    dep.setInterpreterGroup(intpGroup);
+
+    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    dep.close();
+    delete(tmpDir);
+  }
+
+  private void delete(File file) {
+    if (file.isFile()) file.delete();
+    else if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      if (files != null && files.length > 0) {
+        for (File f : files) {
+          delete(f);
+        }
+      }
+      file.delete();
+    }
+  }
+
+  @Test
+  public void testDefault() {
+    dep.getDependencyContext().reset();
+    InterpreterResult ret = dep.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
+    assertEquals(Code.SUCCESS, ret.code());
+
+    assertEquals(1, dep.getDependencyContext().getFiles().size());
+    assertEquals(1, dep.getDependencyContext().getFilesDist().size());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
new file mode 100644
index 0000000..20f7fa4
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class SparkInterpreterTest {
+  public static SparkInterpreter repl;
+  private InterpreterContext context;
+  private File tmpDir;
+
+  @Before
+  public void setUp() throws Exception {
+    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
+    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
+
+    tmpDir.mkdirs();
+
+	  if (repl == null) {
+		  Properties p = new Properties();
+
+	    repl = new SparkInterpreter(p);
+  	  repl.open();
+	  }
+
+    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+	}
+
+  @After
+  public void tearDown() throws Exception {
+    delete(tmpDir);
+  }
+
+  private void delete(File file) {
+    if (file.isFile()) file.delete();
+    else if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      if (files != null && files.length > 0) {
+        for (File f : files) {
+          delete(f);
+        }
+      }
+      file.delete();
+    }
+  }
+
+	@Test
+	public void testBasicIntp() {
+		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val a = 1\nval b = 2", context).code());
+
+		// when interpret incomplete expression
+		InterpreterResult incomplete = repl.interpret("val a = \"\"\"", context);
+		assertEquals(InterpreterResult.Code.INCOMPLETE, incomplete.code());
+		assertTrue(incomplete.message().length()>0); // expecting some error message
+		/*
+		assertEquals(1, repl.getValue("a"));
+		assertEquals(2, repl.getValue("b"));
+		repl.interpret("val ver = sc.version");
+		assertNotNull(repl.getValue("ver"));
+		assertEquals("HELLO\n", repl.interpret("println(\"HELLO\")").message());
+		*/
+	}
+
+	@Test
+	public void testEndWithComment() {
+		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val c=1\n//comment", context).code());
+	}
+
+	@Test
+	public void testSparkSql(){
+		repl.interpret("case class Person(name:String, age:Int)\n", context);
+		repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", context);
+		assertEquals(Code.SUCCESS, repl.interpret("people.take(3)", context).code());
+
+		// create new interpreter
+		Properties p = new Properties();
+		SparkInterpreter repl2 = new SparkInterpreter(p);
+		repl2.open();
+
+		repl.interpret("case class Man(name:String, age:Int)", context);
+		repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
+		assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", context).code());
+		repl2.getSparkContext().stop();
+	}
+
+	@Test
+	public void testReferencingUndefinedVal(){
+		InterpreterResult result = repl.interpret("def category(min: Int) = {" +
+				       "    if (0 <= value) \"error\"" +
+                       "}", context);
+		assertEquals(Code.ERROR, result.code());
+	}
+
+  @Test
+  public void testZContextDependencyLoading() {
+    // try to import library does not exist on classpath. it'll fail
+    assertEquals(InterpreterResult.Code.ERROR, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
+
+    // load library from maven repository and try to import again
+    repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
new file mode 100644
index 0000000..71f088d
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Type;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.apache.zeppelin.spark.SparkSqlInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class SparkSqlInterpreterTest {
+
+	private SparkSqlInterpreter sql;
+  private SparkInterpreter repl;
+  private InterpreterContext context;
+
+	@Before
+	public void setUp() throws Exception {
+		Properties p = new Properties();
+
+		if (repl == null) {
+
+		  if (SparkInterpreterTest.repl == null) {
+		    repl = new SparkInterpreter(p);
+		    repl.open();
+		    SparkInterpreterTest.repl = repl;
+		  } else {
+		    repl = SparkInterpreterTest.repl;
+		  }
+
+  		sql = new SparkSqlInterpreter(p);
+
+  		InterpreterGroup intpGroup = new InterpreterGroup();
+		  intpGroup.add(repl);
+		  intpGroup.add(sql);
+		  sql.setInterpreterGroup(intpGroup);
+		  sql.open();
+		}
+		context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		repl.interpret("case class Test(name:String, age:Int)", context);
+		repl.interpret("val test = sc.parallelize(Seq(Test(\"moon\", 33), Test(\"jobs\", 51), Test(\"gates\", 51), Test(\"park\", 34)))", context);
+		repl.interpret("test.registerAsTable(\"test\")", context);
+
+		InterpreterResult ret = sql.interpret("select name, age from test where age < 40", context);
+		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+		assertEquals(Type.TABLE, ret.type());
+		assertEquals("name\tage\nmoon\t33\npark\t34\n", ret.message());
+
+		assertEquals(InterpreterResult.Code.ERROR, sql.interpret("select wrong syntax", context).code());
+		assertEquals(InterpreterResult.Code.SUCCESS, sql.interpret("select case when name==\"aa\" then name else name end from people", context).code());
+	}
+
+	@Test
+	public void testStruct(){
+		repl.interpret("case class Person(name:String, age:Int)", context);
+		repl.interpret("case class People(group:String, person:Person)", context);
+		repl.interpret("val gr = sc.parallelize(Seq(People(\"g1\", Person(\"moon\",33)), People(\"g2\", Person(\"sun\",11))))", context);
+		repl.interpret("gr.registerAsTable(\"gr\")", context);
+		InterpreterResult ret = sql.interpret("select * from gr", context);
+		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+	}
+
+	@Test
+	public void test_null_value_in_row() {
+		repl.interpret("import org.apache.spark.sql._", context);
+		repl.interpret("def toInt(s:String): Any = {try { s.trim().toInt} catch {case e:Exception => null}}", context);
+		repl.interpret("val schema = StructType(Seq(StructField(\"name\", StringType, false),StructField(\"age\" , IntegerType, true),StructField(\"other\" , StringType, false)))", context);
+		repl.interpret("val csv = sc.parallelize(Seq((\"jobs, 51, apple\"), (\"gates, , microsoft\")))", context);
+		repl.interpret("val raw = csv.map(_.split(\",\")).map(p => Row(p(0),toInt(p(1)),p(2)))", context);
+		repl.interpret("val people = z.sqlContext.applySchema(raw, schema)", context);
+		repl.interpret("people.registerTempTable(\"people\")", context);
+
+		InterpreterResult ret = sql.interpret("select name, age from people where name = 'gates'", context);
+		System.err.println("RET=" + ret.message());
+		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+		assertEquals(Type.TABLE, ret.type());
+		assertEquals("name\tage\ngates\tnull\n", ret.message());
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java b/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
new file mode 100644
index 0000000..e41de60
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.zeppelin.spark.dep.DependencyResolver;
+import org.junit.Test;
+
+public class DependencyResolverTest {
+
+  @Test
+  public void testInferScalaVersion() {
+    String [] version = scala.util.Properties.versionNumberString().split("[.]");
+    String scalaVersion = version[0] + "." + version[1];
+
+    assertEquals("groupId:artifactId:version",
+        DependencyResolver.inferScalaVersion("groupId:artifactId:version"));
+    assertEquals("groupId:artifactId_" + scalaVersion + ":version",
+        DependencyResolver.inferScalaVersion("groupId::artifactId:version"));
+    assertEquals("groupId:artifactId:version::test",
+        DependencyResolver.inferScalaVersion("groupId:artifactId:version::test"));
+    assertEquals("*",
+        DependencyResolver.inferScalaVersion("*"));
+    assertEquals("groupId:*",
+        DependencyResolver.inferScalaVersion("groupId:*"));
+    assertEquals("groupId:artifactId*",
+        DependencyResolver.inferScalaVersion("groupId:artifactId*"));
+    assertEquals("groupId:artifactId_" + scalaVersion,
+        DependencyResolver.inferScalaVersion("groupId::artifactId"));
+    assertEquals("groupId:artifactId_" + scalaVersion + "*",
+        DependencyResolver.inferScalaVersion("groupId::artifactId*"));
+    assertEquals("groupId:artifactId_" + scalaVersion + ":*",
+        DependencyResolver.inferScalaVersion("groupId::artifactId:*"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/testing/startSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 60e8efb..896ed52 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -1,4 +1,21 @@
 #!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 wget http://apache.mesi.com.ar/spark/spark-1.1.1/spark-1.1.1-bin-hadoop2.3.tgz
 tar zxvf spark-1.1.1-bin-hadoop2.3.tgz
 cd spark-1.1.1-bin-hadoop2.3

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/testing/stopSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/stopSparkCluster.sh b/testing/stopSparkCluster.sh
index 36b95f4..4f8d7a9 100755
--- a/testing/stopSparkCluster.sh
+++ b/testing/stopSparkCluster.sh
@@ -1,4 +1,21 @@
-#!/bin/sh
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 cd spark-1.1.1-bin-hadoop2.3
 ./sbin/stop-master.sh
 kill $(ps -ef | grep 'org.apache.spark.deploy.worker.Worker' | awk '{print $2}')

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/README.md
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/README.md b/zeppelin-distribution/README.md
index dabb7ce..72fd630 100644
--- a/zeppelin-distribution/README.md
+++ b/zeppelin-distribution/README.md
@@ -1,3 +1,20 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 # Distribution archive of Zeppelin project #
 
 Zeppelin is distributed as a single gzip archive with the following structure:

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/build-infrastructure.md
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/build-infrastructure.md b/zeppelin-distribution/build-infrastructure.md
index 582725d..b8b3e20 100644
--- a/zeppelin-distribution/build-infrastructure.md
+++ b/zeppelin-distribution/build-infrastructure.md
@@ -1,3 +1,20 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 Zeppelin dependency graph:
 --------------
                      hive, hadoop, ...

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/pom.xml b/zeppelin-distribution/pom.xml
index 1a4da42..efe83ab 100644
--- a/zeppelin-distribution/pom.xml
+++ b/zeppelin-distribution/pom.xml
@@ -1,11 +1,28 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/assemble/distribution.xml
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/assemble/distribution.xml b/zeppelin-distribution/src/assemble/distribution.xml
index 2edee49..dae34c4 100644
--- a/zeppelin-distribution/src/assemble/distribution.xml
+++ b/zeppelin-distribution/src/assemble/distribution.xml
@@ -1,3 +1,20 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
     xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
@@ -16,9 +33,9 @@
       <useAllReactorProjects>true</useAllReactorProjects> -->
       <!-- Now, select which projects to include in this module-set. -->
       <includes>
-        <include>com.nflabs.zeppelin:zeppelin-api-ui</include>
-        <include>com.nflabs.zeppelin:zeppelin-server</include>
-        <include>com.nflabs.zeppelin:zeppelin-web</include>
+        <include>org.apache.zeppelin:zeppelin-api-ui</include>
+        <include>org.apache.zeppelin:zeppelin-server</include>
+        <include>org.apache.zeppelin:zeppelin-web</include>
       </includes>
       <useProjectArtifact>false</useProjectArtifact>
       <useTransitiveDependencies>false</useTransitiveDependencies>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/control/control
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/control/control b/zeppelin-distribution/src/deb/control/control
index a9d4222..507d151 100644
--- a/zeppelin-distribution/src/deb/control/control
+++ b/zeppelin-distribution/src/deb/control/control
@@ -1,3 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
 Package: [[deb.pkg.name]]
 Version: [[version]]-[[buildNumber]]
 Section: misc

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/control/prerm
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/control/prerm b/zeppelin-distribution/src/deb/control/prerm
index 447d708..85977fc 100644
--- a/zeppelin-distribution/src/deb/control/prerm
+++ b/zeppelin-distribution/src/deb/control/prerm
@@ -1,4 +1,22 @@
 #!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
 
 set -e
 
@@ -6,4 +24,4 @@ SERVICE=$(which service 2> /dev/null)
 RM=$(which rm 2> /dev/null)
 
 exec $SERVICE zeppelind stop
-exec $RM -rf [[deb.log.path]]/* [[deb.pid.path]]/*
\ No newline at end of file
+exec $RM -rf [[deb.log.path]]/* [[deb.pid.path]]/*

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/init.d/zeppelind
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/init.d/zeppelind b/zeppelin-distribution/src/deb/init.d/zeppelind
index dbb22c4..d9752df 100755
--- a/zeppelin-distribution/src/deb/init.d/zeppelind
+++ b/zeppelin-distribution/src/deb/init.d/zeppelind
@@ -1,5 +1,23 @@
 #!/bin/bash
 #
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
 ### BEGIN INIT INFO
 # Provides:          zeppelind
 # Required-Start:    $remote_fs

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/Makefile
----------------------------------------------------------------------
diff --git a/zeppelin-docs/Makefile b/zeppelin-docs/Makefile
deleted file mode 100644
index 7614d1a..0000000
--- a/zeppelin-docs/Makefile
+++ /dev/null
@@ -1,174 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = target
-SOURCEDIR     = src/main/sphinx
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -W -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/README.md
----------------------------------------------------------------------
diff --git a/zeppelin-docs/README.md b/zeppelin-docs/README.md
deleted file mode 100644
index 01b9c93..0000000
--- a/zeppelin-docs/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# Zeppelin documentation
-Build Zeppelin documentation
-
-## Get Sphinx-doc
- - Install [Sphinx](http://sphinx-doc.org/install.html)
-
-## Build
-```mvn package```
-Or
-```make html```
-
-Sphinx will create the folder "target".
-
-### Build failed
- - make: sphinx-build: No such file or directory
-```
-Make sure sphinx is correctly installed
-Check the sphinx app name (can be sphinx-build27, in that case make a symbolic link)
-```
- - ValueError: unknown locale: UTF-8
-````
-export LC_ALL=en_US.UTF-8
-export LANG=en_US.UTF-8
-```

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-docs/pom.xml b/zeppelin-docs/pom.xml
deleted file mode 100644
index 9c43ada..0000000
--- a/zeppelin-docs/pom.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
-    <version>0.5.0-SNAPSHOT</version>
-  </parent>
-
-  <groupId>com.nflabs.zeppelin</groupId>
-  <artifactId>zeppelin-api-ui</artifactId>
-  <packaging>war</packaging>
-  <version>0.5.0-SNAPSHOT</version>
-  <name>Zeppelin: Documentation</name>
-  <url>http://www.nflabs.com</url>
-  
-  <!--
-      Generate HTML docs quickly while developing:
-      - brew install python
-      - pip install sphinx
-    -->
-  
-  <build>
-    <finalName>zeppelin-api-ui</finalName>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>com.mycila</groupId>
-          <artifactId>license-maven-plugin</artifactId>
-          <configuration>
-            <excludes combine.children="append">
-              <exclude>**/*.conf</exclude>
-              <exclude>**/*.css_t</exclude>
-            </excludes>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-    
-    <plugins>
-      <!-- Swagger ui -->
-      <plugin>
-	<groupId>org.apache.maven.plugins</groupId>
-	<artifactId>maven-war-plugin</artifactId>
-	<configuration>
-	  <failOnMissingWebXml>false</failOnMissingWebXml>
-	  <webResources>
-	    <resource>
-	      <directory>src/main/swagger</directory>
-	    </resource>
-	  </webResources>
-	</configuration>
-      </plugin>
-      
-      <!-- Sphinx -->
-      <plugin>
-	<groupId>org.tomdz.maven</groupId>
-	<artifactId>sphinx-maven-plugin</artifactId>
-	<configuration>
-	  <fork>true</fork>
-	  <force>true</force>
-	  <warningsAsErrors>true</warningsAsErrors>
-	  <sourceDirectory>${project.basedir}/src/main/sphinx</sourceDirectory>
-	  <outputDirectory>${project.build.directory}/html</outputDirectory>
-	</configuration>
-	<executions>
-	  <execution>
-	    <phase>package</phase>
-	    <goals>
-	      <goal>generate</goal>
-	    </goals>
-	  </execution>
-	</executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-deploy-plugin</artifactId>
-        <version>2.7</version>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/index.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/index.rst b/zeppelin-docs/src/main/sphinx/index.rst
deleted file mode 100644
index 9d5468e..0000000
--- a/zeppelin-docs/src/main/sphinx/index.rst
+++ /dev/null
@@ -1,21 +0,0 @@
-.. Zeppelin documentation master file, created by
-   sphinx-quickstart on Wed Jan 29 17:19:36 2014.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Zeppelin documentation
-====================================
-
-.. toctree::
-   :maxdepth: 4
-   :numbered: 2
-   :titlesonly:
-
-   installation
-
-Link
-====================================
-.. toctree::
-    :maxdepth: 1
-
-* :ref:`search`
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/installation.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/installation.rst b/zeppelin-docs/src/main/sphinx/installation.rst
deleted file mode 100644
index 6ecc42d..0000000
--- a/zeppelin-docs/src/main/sphinx/installation.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-************
-Installation
-************
-
-.. toctree::
-    :maxdepth: 1
-
-    installation/install
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/installation/install.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/installation/install.rst b/zeppelin-docs/src/main/sphinx/installation/install.rst
deleted file mode 100644
index 5317e00..0000000
--- a/zeppelin-docs/src/main/sphinx/installation/install.rst
+++ /dev/null
@@ -1,101 +0,0 @@
-=================
-Install Zeppelin
-=================
-
-Instructions for the Impatient
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Install Zeppelin in local mode
-
-.. code-block:: bash
-
-  # this scripts install hadoop and zeppelin in current directory and start zeppelin in local mode
-  # download and unarchive hadoop distribution package
-  curl -O http://apache.mirror.cdnetworks.com/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz
-  tar -xzf hadoop-1.2.1-bin.tar.gz
-
-  # download zeppelin and unarchive
-  curl -O https://s3-ap-northeast-1.amazonaws.com/zeppel.in/zeppelin-0.3.0.tar.gz
-  tar -xzf zeppelin-0.3.0.tar.gz
-
-  # set HADOOP_HOME
-  echo "export HADOOP_HOME=`pwd`/hadoop-1.2.1" >> zeppelin-0.3.0/conf/zeppelin-env.sh
-
-  # start zeppelin
-  ./zeppelin-0.3.0/bin/zeppelin-daemon.sh start
-
-You can access Zeppelin with browser http://localhost:8080
-
-Install
-^^^^^^^
-To configure Zeppelin with an existing Hadoop cluster, refer to this section.
-
-Prerequisites
--------------
-Java 1.6 or Later
-Apache Hadoop (Standalone mode)
-Download
-To get Zeppelin distribution, download a recent release.
-
-Install
--------
-Unpack the downloaded Zeppelin distribution.
-
-Configure
----------
-Configuration can be done by both environment variable and java properties. If both are defined, the environment variable is used.
-
-=========================    =======================  ============================== ===========
-zepplin-env.sh	             zepplin-site.xml         Default value  		     Description
-=========================    =======================  ============================== ===========
-ZEPPELIN_HOME	  		    		   	   		   	     Zeppelin Home directory
-ZEPPELIN_PORT         	     zeppelin.server.port     8080	   		     Zeppelin server port
-ZEPPELIN_JOB_DIR             zeppelin.job.dir         jobs	   		     Zeppelin persist/load session in this directory. Can be a path or a URI. location on HDFS supported
-ZEPPELIN_ZAN_REPO            zeppelin.zan.repo        https://github.com/NFLabs/zan  Remote ZAN repository URL
-ZEPPELIN_ZAN_LOCAL_REPO      zeppelin.zan.localrepo   zan-repo	 		     Zeppelin library local repository. Local filesystem path
ZEPPELIN_ZAN_SHARED_REPO     zeppelin.zan.sharedrepo				     Zeppelin library shared repository. Location on HDFS. Useful when your backend (eg. hiveserver) is not running on the same machine and you want to use a zeppelin library with resource file(eg. in hive 'ADD FILE 'path'). So your backend can get resource file from shared repository.
-ZEPPELIN_DRIVERS             zeppelin.drivers         hive:hive2://,exec:exec://     Comma separated list of [Name]:[Connection URI]
-ZEPPELIN_DRIVER_DIR          zeppelin.driver.dir      drivers			     Zeppelin driver directory.
-=========================    =======================  ============================== ===========
-
-Configuring with existing Hive
--------------------------------
-If you have hive already installed in your hadoop cluster, just run hive server and make Zeppelin connect to it. There are two different versions of Hive servers, Hive Server1 and Hive Server2. Make sure you have Hive server running.
-
-And then, add connection uri in zeppelin.drivers at zeppelin-site.xml If you have Hive Server 1 installed and running on host hiveserver1Address on port 10000, configuration property can be
-
-.. code-block:: bash
- 
- <property>
-   <name>zeppelin.drivers</name>
-   <value>hive:hive://hiveserver1Address:10000/default,exec:exec://</value>
-   <description>Comma separated driver configurations uri. </description>
- </property>
-
-If Hive Server 2 installed and running on host hiveserver2Address on port 10000, configuration will be
-
-.. code-block:: bash
-
-  <property>
-   <name>zeppelin.drivers</name>
-   <value>hive:hive2://hiveserver2Address:10000/default,exec:exec://</value>
-   <description>Comma separated driver configurations uri. </description>
-  </property>
-
-Start/Stop
-^^^^^^^^^^
-
-**Start Zeppelin**
-
-.. code-block:: bash
-
-  bin/zeppelin-daemon.sh start
-
-After successful start, visit http://localhost:8080 with your web browser
-
-**Stop Zeppelin**
-
-.. code-block:: bash
-
-  bin/zeppelin-daemon.sh stop
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html b/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
deleted file mode 100644
index 3cbcded..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
+++ /dev/null
@@ -1,38 +0,0 @@
-{%- extends 'basic/layout.html' %}
-
-{% block relbar1 %}{% endblock %}
-{% block relbar2 %}{% endblock %}
-
-{% macro nav() %}
-<p class="nav">
-    <span class="left">
-        {%- if prev %}
-        &laquo; <a href="{{ prev.link|e }}">{{ prev.title }}</a>
-        {%- else %}
-        &nbsp;
-        {%- endif %}
-    </span>
-    <span class="right">
-        {%- if next %}
-        <a href="{{ next.link|e }}">{{ next.title }}</a> &raquo;
-        {%- endif %}
-    </span>
-</p>
-{% endmacro %}
-
-{% block content %}
-<div class="header">
-    <h1 class="heading"><a href="{{ pathto('index') }}">
-        <span>{{ shorttitle|e }}</span></a></h1>
-    <h2 class="heading"><span>{{ title|striptags|e }}</span></h2>
-</div>
-<div class="topnav">
-    {{ nav() }}
-</div>
-<div class="content">
-    {% block body %}{% endblock %}
-</div>
-<div class="bottomnav">
-    {{ nav() }}
-</div>
-{% endblock %}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png
deleted file mode 100644
index f0f5ebb..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png
deleted file mode 100644
index 114fa32..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png
deleted file mode 100644
index f0c9386..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
deleted file mode 100644
index 44401e9..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-@import url("haiku.css");
-@import url(http://fonts.googleapis.com/css?family=Anonymous+Pro);
-@import url(http://fonts.googleapis.com/css?family=Open+Sans:400italic,600italic,700italic,400,600,700);
-
-html {
-    background: #fff;
-    -webkit-font-smoothing: antialiased;
-}
-
-body {
-/*    font-family: 'Helvetica Neue', Helvetica, Arial, 'lucida grande', tahoma, verdana, arial, sans-serif;*/
-    font-family: 'Open Sans', sans-serif;
-    color: #222222;
-}
-
-div.header {
-    background-color: rgb(48, 113, 169);
-    background-image: url(presto.png);
-    background-position: 36px 50%;
-    background-repeat: no-repeat;
-    background-size: 57px 50px;
-    padding: 10px 40px 20px 120px;
-        color: #fff !important;
-}
-
-div.header h1 a {
-    color: #fff !important;
-}
-
-div.header h2 {
-    color: rgb(184, 184, 185);
-}
-
-.nav .right {
-    float: right;
-    text-align: right;
-}
-
-.highlight pre {
- background-color:white;
- font-family: 'Anonymous Pro', sans-serif;
-}
-
-pre {
-    padding: 0.8em;
-    border: 0;
-}
-
-div.topnav {
-    background: #eeeeee;
-}
-
-div.topnav,
-div.bottomnav {
-    padding: 10px 0;
-}
-
-div.topnav p,
-div.bottomnav p {
-    font-size: 1.0em;
-    margin: 0 40px;
-    text-align: left;
-}
-
-div.header h1 {
-    font-size: 1.8em;
-}
-
-h1 {
-    font-size: 1.5em;
-}
-
-h2 {
-    font-size: 1.4em;
-}
-
-h3 {
-    font-size: 1.3em;
-}
-
-h4 {
-    font-size: 1.2em;
-}
-
-div.content {
-    font-size: 1.0em;
-}
-
-div.content ul > li {
-    background-image: url(bullet.png);
-}
-
-div.admonition {
-    border-radius: 3px;
-    border-style: solid;
-}
-
-div.note {
-    background-color: #f5f5ff
-}
-
-tt {
-    font-family: Consolas, 'Liberation Mono', Courier, monospace;
-    font-size: 0.9em;
-}
-
-tt.descname {
-    font-size: 1.1em;
-}
-
-tt.docutils.literal {
-    margin: 0 2px;
-    padding: 0 5px;
-    border: 1px solid #ddd;
-    background-color: #f8f8f8;
-    border-radius: 3px;
-    white-space: nowrap;
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf b/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
deleted file mode 100644
index 726cae7..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-[theme]
-inherit = haiku
-stylesheet = zeppelin.css
-
-[options]
-bodyfont = 'Open Sans', sans-serif
-
-full_logo         = false
-textcolor         = #ecf2fc
-headingcolor      = #374665
-
-linkcolor         = #3b5998
-visitedlinkcolor  = #3b5998
-hoverlinkcolor    = #3b5998


Mime
View raw message