geode-commits mailing list archives

From: aba...@apache.org
Subject: [1/6] geode git commit: GEODE-194: Remove spark connector
Date: Wed, 07 Jun 2017 21:27:29 GMT
Repository: geode
Updated Branches:
  refs/heads/release/1.2.0 991952534 -> 3bc0a16d0


http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/Emp.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/Emp.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/Emp.java
deleted file mode 100644
index 03e15a0..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/Emp.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo;
-
-import java.io.Serializable;
-
-/**
- * This is a demo class used in doc/?.md
- */
-public class Emp implements Serializable {
-
-  private int id;
-  
-  private String lname;
-
-  private String fname;
-
-  private int age;
-
-  private String loc;
-
-  public Emp(int id, String lname, String fname, int age, String loc) {
-    this.id = id;
-    this.lname = lname;
-    this.fname = fname;
-    this.age = age;
-    this.loc = loc;
-  }
-
-  public int getId() {
-    return id;
-  }
-
-  public String getLname() {
-    return lname;
-  }
-
-  public String getFname() {
-    return fname;
-  }
-
-  public int getAge() {
-    return age;
-  }
-
-  public String getLoc() {
-    return loc;
-  }
-
-  @Override
-  public String toString() {
-    return "Emp(" + id + ", " + lname + ", " + fname + ", " + age + ", " + loc + ")";
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    Emp emp = (Emp) o;
-
-    if (age != emp.age) return false;
-    if (id != emp.id) return false;
-    if (fname != null ? !fname.equals(emp.fname) : emp.fname != null) return false;
-    if (lname != null ? !lname.equals(emp.lname) : emp.lname != null) return false;
-    if (loc != null ? !loc.equals(emp.loc) : emp.loc != null) return false;
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    int result = id;
-    result = 31 * result + (lname != null ? lname.hashCode() : 0);
-    result = 31 * result + (fname != null ? fname.hashCode() : 0);
-    result = 31 * result + age;
-    result = 31 * result + (loc != null ? loc.hashCode() : 0);
-    return result;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
deleted file mode 100644
index 9107796..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo;
-
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.DataFrame;
-import org.apache.spark.sql.SQLContext;
-import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
-
-
-/**
- * This Spark application demonstrates how to get region data from Geode using Geode
- * OQL Java API. The result is a Spark DataFrame.
- * <p>
- * In order to run it, you will need to start a Geode cluster, and run demo PairRDDSaveJavaDemo
- * first to create some data in the region.
- * <p>
- * Once you compile and package the demo, the jar file basic-demos_2.10-0.5.0.jar
- * should be generated under geode-spark-demos/basic-demos/target/scala-2.10/.
- * Then run the following command to start a Spark job:
- * <pre>
- *   <path to spark>/bin/spark-submit --master=local[2] --class demo.OQLJavaDemo \
- *       <path to>/basic-demos_2.10-0.5.0.jar <locator host>:<port>
- * </pre>
- */
-public class OQLJavaDemo {
-
-  public static void main(String[] argv) {
-
-    if (argv.length != 1) {
-      System.err.printf("Usage: OQLJavaDemo <locators>\n");
-      return;
-    }
-
-    SparkConf conf = new SparkConf().setAppName("OQLJavaDemo");
-    conf.set(GeodeLocatorPropKey, argv[0]); // "192.168.1.47[10335]"
-    JavaSparkContext sc = new JavaSparkContext(conf);
-    SQLContext sqlContext = new org.apache.spark.sql.SQLContext(sc);
-    DataFrame df = javaFunctions(sqlContext).geodeOQL("select * from /str_str_region");
-    System.out.println("======= DataFrame =======\n");
-    df.show();
-    sc.stop();
-  }
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
deleted file mode 100644
index d1edd41..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo;
-
-import org.apache.geode.spark.connector.GeodeConnectionConf;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import scala.Tuple2;
-import java.util.*;
-
-import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
-
-/**
- * This Spark application demonstrates how to save a RDD to Geode using Geode Spark
- * Connector with Java.
- * <p/>
- * In order to run it, you will need to start Geode cluster, and create the following region
- * with GFSH:
- * <pre>
- * gfsh> create region --name=str_str_region --type=REPLICATE \
- *         --key-constraint=java.lang.String --value-constraint=java.lang.String
- * </pre>
- * 
- * Once you compile and package the demo, the jar file basic-demos_2.10-0.5.0.jar
- * should be generated under geode-spark-demos/basic-demos/target/scala-2.10/.
- * Then run the following command to start a Spark job:
- * <pre>
- *   <path to spark>/bin/spark-submit --master=local[2] --class demo.PairRDDSaveJavaDemo \
- *       <path to>/basic-demos_2.10-0.5.0.jar <locator host>:<port>
- * </pre>
- * 
- * Verify the data was saved to Geode with GFSH:
- * <pre>gfsh> query --query="select * from /str_str_region.entrySet"  </pre>
- */
-public class PairRDDSaveJavaDemo {
-
-  public static void main(String[] argv) {
-
-    if (argv.length != 1) {
-      System.err.printf("Usage: PairRDDSaveJavaDemo <locators>\n");
-      return;
-    }
-
-    SparkConf conf = new SparkConf().setAppName("PairRDDSaveJavaDemo");
-    conf.set(GeodeLocatorPropKey, argv[0]);
-    JavaSparkContext sc = new JavaSparkContext(conf);
-    GeodeConnectionConf connConf = GeodeConnectionConf.apply(conf);
-
-    List<Tuple2<String, String>> data = new ArrayList<>();
-    data.add(new Tuple2<>("7", "seven"));
-    data.add(new Tuple2<>("8", "eight"));
-    data.add(new Tuple2<>("9", "nine"));
-
-    List<Tuple2<String, String>> data2 = new ArrayList<Tuple2<String, String>>();
-    data2.add(new Tuple2<>("11", "eleven"));
-    data2.add(new Tuple2<>("12", "twelve"));
-    data2.add(new Tuple2<>("13", "thirteen"));
-
-    // method 1: generate JavaPairRDD directly
-    JavaPairRDD<String, String> rdd1 =  sc.parallelizePairs(data);
-    javaFunctions(rdd1).saveToGeode("str_str_region", connConf);
-
-    // method 2: convert JavaRDD<Tuple2<K,V>> to JavaPairRDD<K, V>
-    JavaRDD<Tuple2<String, String>> rdd2 =  sc.parallelize(data2);
-    javaFunctions(toJavaPairRDD(rdd2)).saveToGeode("str_str_region", connConf);
-       
-    sc.stop();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
deleted file mode 100644
index 22c01f4..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo;
-
-import org.apache.geode.spark.connector.GeodeConnectionConf;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.PairFunction;
-import scala.Tuple2;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
-
-/**
- * This Spark application demonstrates how to save a RDD to Geode using Geode Spark
- * Connector with Java.
- * <p/>
- * In order to run it, you will need to start Geode cluster, and create the following region
- * with GFSH:
- * <pre>
- * gfsh> create region --name=str_int_region --type=REPLICATE \
- *         --key-constraint=java.lang.String --value-constraint=java.lang.Integer
- * </pre>
- *
- * Once you compile and package the demo, the jar file basic-demos_2.10-0.5.0.jar
- * should be generated under geode-spark-demos/basic-demos/target/scala-2.10/.
- * Then run the following command to start a Spark job:
- * <pre>
- *   <path to spark>/bin/spark-submit --master=local[2] --class demo.RDDSaveJavaDemo \
- *       <path to>/basic-demos_2.10-0.5.0.jar <locator host>:<port>
- * </pre>
- *
- * Verify the data was saved to Geode with GFSH:
- * <pre>gfsh> query --query="select * from /str_int_region.entrySet"  </pre>
- */
-public class RDDSaveJavaDemo {
-
-  public static void main(String[] argv) {
-
-    if (argv.length != 1) {
-      System.err.printf("Usage: RDDSaveJavaDemo <locators>\n");
-      return;
-    }
-
-    SparkConf conf = new SparkConf().setAppName("RDDSaveJavaDemo");
-    conf.set(GeodeLocatorPropKey, argv[0]);
-    JavaSparkContext sc = new JavaSparkContext(conf);
-
-    List<String> data = new ArrayList<String>();
-    data.add("abcdefg");
-    data.add("abcdefgh");
-    data.add("abcdefghi");
-    JavaRDD<String> rdd =  sc.parallelize(data);
-
-    GeodeConnectionConf connConf = GeodeConnectionConf.apply(conf);
-
-    PairFunction<String, String, Integer> func =  new PairFunction<String, String, Integer>() {
-      @Override public Tuple2<String, Integer> call(String s) throws Exception {
-        return new Tuple2<String, Integer>(s, s.length());
-      }
-    };
-
-    javaFunctions(rdd).saveToGeode("str_int_region", func, connConf);
-
-    sc.stop();
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
deleted file mode 100644
index 41a07f5..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo;
-
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
-
-/**
- * This Spark application demonstrates how to expose a region in Geode as a RDD using Geode
- * Spark Connector with Java.
- * <p>
- * In order to run it, you will need to start Geode cluster, and run demo PairRDDSaveJavaDemo
- * first to create some data in the region.
- * <p>
- * Once you compile and package the demo, the jar file basic-demos_2.10-0.5.0.jar
- * should be generated under geode-spark-demos/basic-demos/target/scala-2.10/.
- * Then run the following command to start a Spark job:
- * <pre>
- *   <path to spark>/bin/spark-submit --master=local[2] --class demo.RegionToRDDJavaDemo \
- *       <path to>/basic-demos_2.10-0.5.0.jar <locator host>:<port>
- * </pre>
- */
-public class RegionToRDDJavaDemo {
-
-  public static void main(String[] argv) {
-
-    if (argv.length != 1) {
-      System.err.printf("Usage: RegionToRDDJavaDemo <locators>\n");
-      return;
-    }
-    
-    SparkConf conf = new SparkConf().setAppName("RegionToRDDJavaDemo"); 
-    conf.set(GeodeLocatorPropKey, argv[0]);
-    JavaSparkContext sc = new JavaSparkContext(conf);
-
-    JavaPairRDD<String, String> rdd = javaFunctions(sc).geodeRegion("str_str_region");
-    System.out.println("=== geodeRegion =======\n" + rdd.collect() + "\n=========================");
-    
-    sc.stop();
-  }
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
deleted file mode 100644
index 063ea69..0000000
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package demo
-
-import org.apache.spark.SparkConf
-import org.apache.spark.streaming.{Seconds, StreamingContext}
-import org.apache.geode.spark.connector.GeodeLocatorPropKey
-import org.apache.geode.spark.connector.streaming._
-
-/**
- * Counts words in UTF8 encoded, '\n' delimited text received from the network every second.
- * <p><p>
- * In order to run it, you will need to start Geode cluster, and create the following region
- * with GFSH:
- * <pre>
- * gfsh> create region --name=str_int_region --type=REPLICATE \
- *         --key-constraint=java.lang.String --value-constraint=java.lang.Integer
- * </pre> 
- *
- * <p>To run this on your local machine, you need to first run a net cat server
- *    `$ nc -lk 9999`
- * and then run the example
- *    `$ bin/spark-submit --master=local[2] --class demo.NetworkWordCount <path to>/basic-demos_2.10-0.5.0.jar localhost 9999 locatorHost:port`
- * 
- * <p><p> check result that was saved to Geode with GFSH:
- * <pre>gfsh> query --query="select * from /str_int_region.entrySet"  </pre>
- */
-object NetworkWordCount {
-  
-  def main(args: Array[String]) {
-    if (args.length < 3) {
-      System.err.println("Usage: NetworkWordCount <hostname> <port> <geode locator>")
-      System.exit(1)
-    }
-
-    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
-      val currentCount = values.foldLeft(0)(_ + _)
-      val previousCount = state.getOrElse(0)
-      Some(currentCount + previousCount)
-    }
-    
-    // Create the context with a 1 second batch size
-    val sparkConf = new SparkConf().setAppName("NetworkWordCount").set(GeodeLocatorPropKey, args(2))
-    val ssc = new StreamingContext(sparkConf, Seconds(1))
-    ssc.checkpoint(".")
-    
-    // Create a socket stream on target ip:port and count the
-    // words in input stream of \n delimited text (eg. generated by 'nc')
-    // Note that no duplication in storage level only for running locally.
-    // Replication necessary in distributed scenario for fault tolerance.
-    val lines = ssc.socketTextStream(args(0), args(1).toInt)
-    val words = lines.flatMap(_.split(" "))
-    val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
-    val runningCounts = wordCounts.updateStateByKey[Int](updateFunc)
-    // runningCounts.print()
-    runningCounts.saveToGeode("str_int_region")
-    ssc.start()
-    ssc.awaitTermination()
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/project/Dependencies.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/Dependencies.scala b/geode-spark-connector/project/Dependencies.scala
deleted file mode 100644
index 1518494..0000000
--- a/geode-spark-connector/project/Dependencies.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import sbt._
-import sbt.Keys._
-
-object Dependencies {
-
-  object Compile {
-    val sparkStreaming = "org.apache.spark" %% "spark-streaming" % "1.3.0" 
-    val sparkSql = "org.apache.spark" %% "spark-sql" % "1.3.0"
-    val geode = "org.apache.geode" % "geode-core" % "1.0.0-incubating" excludeAll(ExclusionRule(organization = "org.jboss.netty") )
-  }
-
-  object Test {
-    val scalaTest = "org.scalatest" % "scalatest_2.10" % "2.2.1" % "it, test" //scala test framework
-    val mockito = "org.mockito" % "mockito-all" % "1.10.19" % "test" //mockito mock test framework
-    val junit = "junit" % "junit" % "4.11" % "it, test" //4.11 because the junit-interface was complaining when using 4.12
-    val novoCode = "com.novocode" % "junit-interface" % "0.11" % "it, test"//for junit to run with sbt
-  }
-
-  import Test._
-  import Compile._
-
-  val unitTests = Seq(scalaTest, mockito, junit, novoCode)
-
-  val connector = unitTests ++ Seq(sparkStreaming, sparkSql, geode)
-
-  val functions = Seq(geode, junit)
- 
-  val demos = Seq(sparkStreaming, sparkSql, geode)
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/project/GeodeSparkBuild.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/GeodeSparkBuild.scala b/geode-spark-connector/project/GeodeSparkBuild.scala
deleted file mode 100644
index 07cae51..0000000
--- a/geode-spark-connector/project/GeodeSparkBuild.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import sbt._
-import sbt.Keys._
-import scoverage.ScoverageSbtPlugin._
-import scoverage.ScoverageSbtPlugin
-
-object GeodeSparkConnectorBuild extends Build {
-  import Settings._
-  import Dependencies._ 
-
-  lazy val root = Project(
-    id = "root", 
-    base =file("."), 
-    aggregate = Seq(geodeFunctions, geodeSparkConnector,demos),
-    settings = commonSettings ++ Seq( 
-     name := "Geode Connector for Apache Spark",
-     publishArtifact :=  false,
-     publishLocal := { },
-     publish := { }
-    )
-  )
- 
-  lazy val geodeFunctions = Project(
-    id = "geode-functions",
-    base = file("geode-functions"),
-    settings = commonSettings ++ Seq(libraryDependencies ++= Dependencies.functions,
-      resolvers ++= gfcResolvers,
-      description := "Required Geode Functions to be deployed onto the Geode Cluster before using the Geode Spark Connector"
-    )
-  ).configs(IntegrationTest)
-  
-  lazy val geodeSparkConnector = Project(
-    id = "geode-spark-connector",
-    base = file("geode-spark-connector"),
-    settings = gfcSettings ++ Seq(libraryDependencies ++= Dependencies.connector,
-      resolvers ++= gfcResolvers,
-      description := "A library that exposes Geode regions as Spark RDDs, writes Spark RDDs to Geode regions, and executes OQL queries from Spark Applications to Geode"
-    )
-  ).dependsOn(geodeFunctions).configs(IntegrationTest)
-
- 
-  /******** Demo Project Definitions ********/ 
-  lazy val demoPath = file("geode-spark-demos")
-
-  lazy val demos = Project ( 
-    id = "geode-spark-demos",
-    base = demoPath,
-    settings = demoSettings,
-    aggregate = Seq(basicDemos)
-  )
- 
-  lazy val basicDemos = Project (
-    id = "basic-demos",
-    base = demoPath / "basic-demos",
-    settings = demoSettings ++ Seq(libraryDependencies ++= Dependencies.demos,
-      resolvers ++= gfcResolvers,
-      description := "Sample applications that demonstrates functionality of the Geode Spark Connector"
-    )
-  ).dependsOn(geodeSparkConnector)
-}
-

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/project/Settings.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/Settings.scala b/geode-spark-connector/project/Settings.scala
deleted file mode 100644
index c6852a6..0000000
--- a/geode-spark-connector/project/Settings.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import sbt._
-import sbt.Keys._
-import org.scalastyle.sbt.ScalastylePlugin
-
-object Settings extends Build {
-   
-  lazy val commonSettings = Seq(
-    organization := "io.pivotal",
-    version := "0.5.0",
-    scalaVersion := "2.10.4",
-    organization := "org.apache.geode.spark",
-    organizationHomepage := Some(url("http://www.pivotal.io/"))
-  ) 
-
-  lazy val gfcResolvers = Seq(
-   //"GemStone Official Release" at "http://dist.gemstone.com/maven/release",
-  "Repo for JLine" at "http://repo.spring.io/libs-release",
-  "Local Maven" at Path.userHome.asFile.toURI.toURL + ".m2/repository",
-  "Apache Snapshots" at "https://repository.apache.org/content/repositories/snapshots/"
-  //"Apache Repository" at "https://repository.apache.org/content/repositories/releases/",
-  // "Akka Repository" at "http://repo.akka.io/releases/",
-  // "Spray Repository" at "http://repo.spray.cc/"
-  //"Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
-  )
-
-
-  val gfcITSettings = inConfig(IntegrationTest)(Defaults.itSettings) ++
-    Seq(parallelExecution in IntegrationTest := false, fork in IntegrationTest := true)
-
-  val gfcCompileSettings = inConfig(Compile)(Defaults.compileSettings) ++ Seq(unmanagedSourceDirectories in Compile += baseDirectory.value /"../geode-functions/src")
-
-  val gfcSettings = commonSettings ++ gfcITSettings ++ gfcCompileSettings 
-
-  val demoSettings = commonSettings ++ Seq(
-      scoverage.ScoverageSbtPlugin.ScoverageKeys.coverageExcludedPackages := ".*"
-    )
-  
-  val scalastyleSettings = Seq( 
-        ScalastylePlugin.scalastyleConfig := baseDirectory.value / "project/scalastyle-config.xml"
-        )
-  
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/project/build.properties
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/build.properties b/geode-spark-connector/project/build.properties
deleted file mode 100644
index 64abd37..0000000
--- a/geode-spark-connector/project/build.properties
+++ /dev/null
@@ -1 +0,0 @@
-sbt.version=0.13.6

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/plugins.sbt b/geode-spark-connector/project/plugins.sbt
deleted file mode 100644
index 313bbbf..0000000
--- a/geode-spark-connector/project/plugins.sbt
+++ /dev/null
@@ -1,8 +0,0 @@
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.6.0")
-
-resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/"
-
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.0.4")
-
-resolvers += Classpaths.sbtPluginReleases
-

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/sbt
----------------------------------------------------------------------
diff --git a/geode-spark-connector/sbt b/geode-spark-connector/sbt
deleted file mode 100755
index 2c69287..0000000
--- a/geode-spark-connector/sbt
+++ /dev/null
@@ -1,602 +0,0 @@
-#!/usr/bin/env bash
-#
-# // Generated from http://www.opensource.org/licenses/bsd-license.php
-# Copyright (c) 2011, Paul Phillips. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-#     * Neither the name of the author nor the names of its contributors
-# may be used to endorse or promote products derived from this software
-# without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# A more capable sbt runner, coincidentally also called sbt.
-# Author: Paul Phillips <paulp@improving.org>
-
-set -o pipefail
-
-# todo - make this dynamic
-declare -r sbt_release_version="0.13.11"
-declare -r sbt_unreleased_version="0.13.11"
-declare -r buildProps="project/build.properties"
-
-declare sbt_jar sbt_dir sbt_create sbt_version sbt_script
-declare scala_version sbt_explicit_version
-declare verbose noshare batch trace_level log_level
-declare sbt_saved_stty debugUs
-
-echoerr () { echo >&2 "$@"; }
-vlog ()    { [[ -n "$verbose" ]] && echoerr "$@"; }
-
-# spaces are possible, e.g. sbt.version = 0.13.0
-build_props_sbt () {
-  [[ -r "$buildProps" ]] && \
-    grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }'
-}
-
-update_build_props_sbt () {
-  local ver="$1"
-  local old="$(build_props_sbt)"
-
-  [[ -r "$buildProps" ]] && [[ "$ver" != "$old" ]] && {
-    perl -pi -e "s/^sbt\.version\b.*\$/sbt.version=${ver}/" "$buildProps"
-    grep -q '^sbt.version[ =]' "$buildProps" || printf "\nsbt.version=%s\n" "$ver" >> "$buildProps"
-
-    vlog "!!!"
-    vlog "!!! Updated file $buildProps setting sbt.version to: $ver"
-    vlog "!!! Previous value was: $old"
-    vlog "!!!"
-  }
-}
-
-set_sbt_version () {
-  sbt_version="${sbt_explicit_version:-$(build_props_sbt)}"
-  [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version
-  export sbt_version
-}
-
-# restore stty settings (echo in particular)
-onSbtRunnerExit() {
-  [[ -n "$sbt_saved_stty" ]] || return
-  vlog ""
-  vlog "restoring stty: $sbt_saved_stty"
-  stty "$sbt_saved_stty"
-  unset sbt_saved_stty
-}
-
-# save stty and trap exit, to ensure echo is reenabled if we are interrupted.
-trap onSbtRunnerExit EXIT
-sbt_saved_stty="$(stty -g 2>/dev/null)"
-vlog "Saved stty: $sbt_saved_stty"
-
-# this seems to cover the bases on OSX, and someone will
-# have to tell me about the others.
-get_script_path () {
-  local path="$1"
-  [[ -L "$path" ]] || { echo "$path" ; return; }
-
-  local target="$(readlink "$path")"
-  if [[ "${target:0:1}" == "/" ]]; then
-    echo "$target"
-  else
-    echo "${path%/*}/$target"
-  fi
-}
-
-die() {
-  echo "Aborting: $@"
-  exit 1
-}
-
-url_base () {
-  local version="$1"
-
-  case "$version" in
-        0.7.*) echo "http://simple-build-tool.googlecode.com" ;;
-      0.10.* ) echo "$sbt_launch_release_repo" ;;
-    0.11.[12]) echo "$sbt_launch_release_repo" ;;
-    *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss"
-               echo "$sbt_launch_snapshot_repo" ;;
-            *) echo "$sbt_launch_release_repo" ;;
-  esac
-}
-
-make_url () {
-  local version="$1"
-
-  local base="${sbt_launch_repo:-$(url_base "$version")}"
-
-  case "$version" in
-        0.7.*) echo "$base/files/sbt-launch-0.7.7.jar" ;;
-      0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
-    0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
-            *) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
-  esac
-}
-
-init_default_option_file () {
-  local overriding_var="${!1}"
-  local default_file="$2"
-  if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then
-    local envvar_file="${BASH_REMATCH[1]}"
-    if [[ -r "$envvar_file" ]]; then
-      default_file="$envvar_file"
-    fi
-  fi
-  echo "$default_file"
-}
-
-declare -r cms_opts="-XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC"
-declare -r jit_opts="-XX:ReservedCodeCacheSize=256m -XX:+TieredCompilation"
-declare -r default_jvm_opts_common="-Xms512m -Xmx1536m -Xss2m $jit_opts $cms_opts"
-declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy"
-declare -r latest_28="2.8.2"
-declare -r latest_29="2.9.3"
-declare -r latest_210="2.10.6"
-declare -r latest_211="2.11.8"
-declare -r latest_212="2.12.0-M3"
-declare -r sbt_launch_release_repo="http://repo.typesafe.com/typesafe/ivy-releases"
-declare -r sbt_launch_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots"
-
-declare -r script_path="$(get_script_path "$BASH_SOURCE")"
-declare -r script_name="${script_path##*/}"
-
-# some non-read-onlies set with defaults
-declare java_cmd="java"
-declare sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)"
-declare jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)"
-declare sbt_launch_dir="$HOME/.sbt/launchers"
-
-declare sbt_launch_repo
-
-# pull -J and -D options to give to java.
-declare -a residual_args
-declare -a java_args
-declare -a scalac_args
-declare -a sbt_commands
-
-# args to jvm/sbt via files or environment variables
-declare -a extra_jvm_opts extra_sbt_opts
-
-addJava () {
-  vlog "[addJava] arg = '$1'"
-  java_args+=("$1")
-}
-addSbt () {
-  vlog "[addSbt] arg = '$1'"
-  sbt_commands+=("$1")
-}
-setThisBuild () {
-  vlog "[addBuild] args = '$@'"
-  local key="$1" && shift
-  addSbt "set $key in ThisBuild := $@"
-}
-addScalac () {
-  vlog "[addScalac] arg = '$1'"
-  scalac_args+=("$1")
-}
-addResidual () {
-  vlog "[residual] arg = '$1'"
-  residual_args+=("$1")
-}
-addResolver () {
-  addSbt "set resolvers += $1"
-}
-addDebugger () {
-  addJava "-Xdebug"
-  addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
-}
-setScalaVersion () {
-  [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")'
-  addSbt "++ $1"
-}
-setJavaHome () {
-  java_cmd="$1/bin/java"
-  setThisBuild javaHome "scala.Some(file(\"$1\"))"
-  export JAVA_HOME="$1"
-  export JDK_HOME="$1"
-  export PATH="$JAVA_HOME/bin:$PATH"
-}
-setJavaHomeQuietly () {
-  addSbt warn
-  setJavaHome "$1"
-  addSbt info
-}
-
-# if set, use JDK_HOME/JAVA_HOME over java found in path
-if [[ -e "$JDK_HOME/lib/tools.jar" ]]; then
-  setJavaHomeQuietly "$JDK_HOME"
-elif [[ -e "$JAVA_HOME/bin/java" ]]; then
-  setJavaHomeQuietly "$JAVA_HOME"
-fi
-
-# directory to store sbt launchers
-[[ -d "$sbt_launch_dir" ]] || mkdir -p "$sbt_launch_dir"
-[[ -w "$sbt_launch_dir" ]] || sbt_launch_dir="$(mktemp -d -t sbt_extras_launchers.XXXXXX)"
-
-java_version () {
-  local version=$("$java_cmd" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d \")
-  vlog "Detected Java version: $version"
-  echo "${version:2:1}"
-}
-
-# MaxPermSize critical on pre-8 jvms but incurs noisy warning on 8+
-default_jvm_opts () {
-  local v="$(java_version)"
-  if [[ $v -ge 8 ]]; then
-    echo "$default_jvm_opts_common"
-  else
-    echo "-XX:MaxPermSize=384m $default_jvm_opts_common"
-  fi
-}
-
-build_props_scala () {
-  if [[ -r "$buildProps" ]]; then
-    versionLine="$(grep '^build.scala.versions' "$buildProps")"
-    versionString="${versionLine##build.scala.versions=}"
-    echo "${versionString%% .*}"
-  fi
-}
-
-execRunner () {
-  # print the arguments one to a line, quoting any containing spaces
-  vlog "# Executing command line:" && {
-    for arg; do
-      if [[ -n "$arg" ]]; then
-        if printf "%s\n" "$arg" | grep -q ' '; then
-          printf >&2 "\"%s\"\n" "$arg"
-        else
-          printf >&2 "%s\n" "$arg"
-        fi
-      fi
-    done
-    vlog ""
-  }
-
-  [[ -n "$batch" ]] && exec </dev/null
-  exec "$@"
-}
-
-jar_url () {
-  make_url "$1"
-}
-
-jar_file () {
-  echo "$sbt_launch_dir/$1/sbt-launch.jar"
-}
-
-download_url () {
-  local url="$1"
-  local jar="$2"
-
-  echoerr "Downloading sbt launcher for $sbt_version:"
-  echoerr "  From  $url"
-  echoerr "    To  $jar"
-
-  mkdir -p "${jar%/*}" && {
-    if which curl >/dev/null; then
-      curl --fail --silent --location "$url" --output "$jar"
-    elif which wget >/dev/null; then
-      wget --quiet -O "$jar" "$url"
-    fi
-  } && [[ -r "$jar" ]]
-}
-
-acquire_sbt_jar () {
-  local sbt_url="$(jar_url "$sbt_version")"
-  sbt_jar="$(jar_file "$sbt_version")"
-
-  [[ -r "$sbt_jar" ]] || download_url "$sbt_url" "$sbt_jar"
-}
-
-usage () {
-  set_sbt_version
-  cat <<EOM
-Usage: $script_name [options]
-
-Note that options which are passed along to sbt begin with -- whereas
-options to this runner use a single dash. Any sbt command can be scheduled
-to run first by prefixing the command with --, so --warn, --error and so on
-are not special.
-
-Output filtering: if there is a file in the home directory called .sbtignore
-and this is not an interactive sbt session, the file is treated as a list of
-bash regular expressions. Output lines which match any regex are not echoed.
-One can see exactly which lines would have been suppressed by starting this
-runner with the -x option.
-
-  -h | -help         print this message
-  -v                 verbose operation (this runner is chattier)
-  -d, -w, -q         aliases for --debug, --warn, --error (q means quiet)
-  -x                 debug this script
-  -trace <level>     display stack traces with a max of <level> frames (default: -1, traces suppressed)
-  -debug-inc         enable debugging log for the incremental compiler
-  -no-colors         disable ANSI color codes
-  -sbt-create        start sbt even if current directory contains no sbt project
-  -sbt-dir   <path>  path to global settings/plugins directory (default: ~/.sbt/<version>)
-  -sbt-boot  <path>  path to shared boot directory (default: ~/.sbt/boot in 0.11+)
-  -ivy       <path>  path to local Ivy repository (default: ~/.ivy2)
-  -no-share          use all local caches; no sharing
-  -offline           put sbt in offline mode
-  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
-  -batch             Disable interactive mode
-  -prompt <expr>     Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted
-  -script <file>     Run the specified file as a scala script
-
-  # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version)
-  -sbt-force-latest         force the use of the latest release of sbt: $sbt_release_version
-  -sbt-version  <version>   use the specified version of sbt (default: $sbt_release_version)
-  -sbt-dev                  use the latest pre-release version of sbt: $sbt_unreleased_version
-  -sbt-jar      <path>      use the specified jar as the sbt launcher
-  -sbt-launch-dir <path>    directory to hold sbt launchers (default: $sbt_launch_dir)
-  -sbt-launch-repo <url>    repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version"))
-
-  # scala version (default: as chosen by sbt)
-  -28                       use $latest_28
-  -29                       use $latest_29
-  -210                      use $latest_210
-  -211                      use $latest_211
-  -212                      use $latest_212
-  -scala-home <path>        use the scala build at the specified directory
-  -scala-version <version>  use the specified version of scala
-  -binary-version <version> use the specified scala version when searching for dependencies
-
-  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
-  -java-home <path>         alternate JAVA_HOME
-
-  # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution
-  # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found
-  <default>        $(default_jvm_opts)
-  JVM_OPTS         environment variable holding either the jvm args directly, or
-                   the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts')
-                   Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument.
-  -jvm-opts <path> file containing jvm args (if not given, .jvmopts in project root is used if present)
-  -Dkey=val        pass -Dkey=val directly to the jvm
-  -J-X             pass option -X directly to the jvm (-J is stripped)
-
-  # passing options to sbt, OR to this runner
-  SBT_OPTS         environment variable holding either the sbt args directly, or
-                   the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts')
-                   Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument.
-  -sbt-opts <path> file containing sbt args (if not given, .sbtopts in project root is used if present)
-  -S-X             add -X to sbt's scalacOptions (-S is stripped)
-EOM
-}
-
-process_args () {
-  require_arg () {
-    local type="$1"
-    local opt="$2"
-    local arg="$3"
-
-    if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-      die "$opt requires <$type> argument"
-    fi
-  }
-  while [[ $# -gt 0 ]]; do
-    case "$1" in
-          -h|-help) usage; exit 1 ;;
-                -v) verbose=true && shift ;;
-                -d) addSbt "--debug" && addSbt debug && shift ;;
-                -w) addSbt "--warn"  && addSbt warn  && shift ;;
-                -q) addSbt "--error" && addSbt error && shift ;;
-                -x) debugUs=true && shift ;;
-            -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;;
-              -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;;
-        -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;;
-         -no-share) noshare=true && shift ;;
-         -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;;
-          -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;;
-        -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;;
-          -offline) addSbt "set offline := true" && shift ;;
-        -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;;
-            -batch) batch=true && shift ;;
-           -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;;
-           -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;;
-
-       -sbt-create) sbt_create=true && shift ;;
-          -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;;
-      -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;;
- -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;;
-          -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;;
-   -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;;
-  -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;;
-    -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;;
-   -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;;
-       -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "scala.Some(file(\"$2\"))" && shift 2 ;;
-        -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;;
-         -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;;
-         -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;;
-
-               -D*) addJava "$1" && shift ;;
-               -J*) addJava "${1:2}" && shift ;;
-               -S*) addScalac "${1:2}" && shift ;;
-               -28) setScalaVersion "$latest_28" && shift ;;
-               -29) setScalaVersion "$latest_29" && shift ;;
-              -210) setScalaVersion "$latest_210" && shift ;;
-              -211) setScalaVersion "$latest_211" && shift ;;
-              -212) setScalaVersion "$latest_212" && shift ;;
-
-           --debug) addSbt debug && addResidual "$1" && shift ;;
-            --warn) addSbt warn  && addResidual "$1" && shift ;;
-           --error) addSbt error && addResidual "$1" && shift ;;
-                 *) addResidual "$1" && shift ;;
-    esac
-  done
-}
-
-# process the direct command line arguments
-process_args "$@"
-
-# skip #-styled comments and blank lines
-readConfigFile() {
-  local end=false
-  until $end; do
-    read || end=true
-    [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY"
-  done < "$1"
-}
-
-# if there are file/environment sbt_opts, process again so we
-# can supply args to this runner
-if [[ -r "$sbt_opts_file" ]]; then
-  vlog "Using sbt options defined in file $sbt_opts_file"
-  while read opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file")
-elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then
-  vlog "Using sbt options defined in variable \$SBT_OPTS"
-  extra_sbt_opts=( $SBT_OPTS )
-else
-  vlog "No extra sbt options have been defined"
-fi
-
-[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}"
-
-# reset "$@" to the residual args
-set -- "${residual_args[@]}"
-argumentCount=$#
-
-# set sbt version
-set_sbt_version
-
-# only exists in 0.12+
-setTraceLevel() {
-  case "$sbt_version" in
-    "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;;
-                                 *) setThisBuild traceLevel $trace_level ;;
-  esac
-}
-
-# set scalacOptions if we were given any -S opts
-[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[@]}\""
-
-# Update build.properties on disk to set explicit version - sbt gives us no choice
-[[ -n "$sbt_explicit_version" ]] && update_build_props_sbt "$sbt_explicit_version"
-vlog "Detected sbt version $sbt_version"
-
-[[ -n "$scala_version" ]] && vlog "Overriding scala version to $scala_version"
-
-if [[ -n "$sbt_script" ]]; then
-  residual_args=( $sbt_script ${residual_args[@]} )
-else
-  # no args - alert them there's stuff in here
-  (( argumentCount > 0 )) || {
-    vlog "Starting $script_name: invoke with -help for other options"
-    residual_args=( shell )
-  }
-fi
-
-# verify this is an sbt dir, -create was given or user attempts to run a scala script
-[[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" ]] || {
-  cat <<EOM
-$(pwd) doesn't appear to be an sbt project.
-If you want to start sbt anyway, run:
-  $0 -sbt-create
-
-EOM
-  exit 1
-}
-
-# pick up completion if present; todo
-[[ -r .sbt_completion.sh ]] && source .sbt_completion.sh
-
-# no jar? download it.
-[[ -r "$sbt_jar" ]] || acquire_sbt_jar || {
-  # still no jar? uh-oh.
-  echo "Download failed. Obtain the jar manually and place it at $sbt_jar"
-  exit 1
-}
-
-if [[ -n "$noshare" ]]; then
-  for opt in ${noshare_opts}; do
-    addJava "$opt"
-  done
-else
-  case "$sbt_version" in
-    "0.7."* | "0.10."* | "0.11."* | "0.12."* )
-      [[ -n "$sbt_dir" ]] || {
-        sbt_dir="$HOME/.sbt/$sbt_version"
-        vlog "Using $sbt_dir as sbt dir, -sbt-dir to override."
-      }
-    ;;
-  esac
-
-  if [[ -n "$sbt_dir" ]]; then
-    addJava "-Dsbt.global.base=$sbt_dir"
-  fi
-fi
-
-if [[ -r "$jvm_opts_file" ]]; then
-  vlog "Using jvm options defined in file $jvm_opts_file"
-  while read opt; do extra_jvm_opts+=("$opt"); done < <(readConfigFile "$jvm_opts_file")
-elif [[ -n "$JVM_OPTS" && ! ("$JVM_OPTS" =~ ^@.*) ]]; then
-  vlog "Using jvm options defined in \$JVM_OPTS variable"
-  extra_jvm_opts=( $JVM_OPTS )
-else
-  vlog "Using default jvm options"
-  extra_jvm_opts=( $(default_jvm_opts) )
-fi
-
-# traceLevel is 0.12+
-[[ -n "$trace_level" ]] && setTraceLevel
-
-main () {
-  execRunner "$java_cmd" \
-    "${extra_jvm_opts[@]}" \
-    "${java_args[@]}" \
-    -jar "$sbt_jar" \
-    "${sbt_commands[@]}" \
-    "${residual_args[@]}"
-}
-
-# sbt inserts this string on certain lines when formatting is enabled:
-#   val OverwriteLine = "\r\u001BM\u001B[2K"
-# ...in order not to spam the console with a million "Resolving" lines.
-# Unfortunately that makes it that much harder to work with when
-# we're not going to print those lines anyway. We strip that bit of
-# line noise, but leave the other codes to preserve color.
-mainFiltered () {
-  local ansiOverwrite='\r\x1BM\x1B[2K'
-  local excludeRegex=$(egrep -v '^#|^$' ~/.sbtignore | paste -sd'|' -)
-
-  echoLine () {
-    local line="$1"
-    local line1="$(echo "$line" | sed 's/\r\x1BM\x1B\[2K//g')"       # This strips the OverwriteLine code.
-    local line2="$(echo "$line1" | sed 's/\x1B\[[0-9;]*[JKmsu]//g')" # This strips all codes - we test regexes against this.
-
-    if [[ $line2 =~ $excludeRegex ]]; then
-      [[ -n $debugUs ]] && echo "[X] $line1"
-    else
-      [[ -n $debugUs ]] && echo "    $line1" || echo "$line1"
-    fi
-  }
-
-  echoLine "Starting sbt with output filtering enabled."
-  main | while read -r line; do echoLine "$line"; done
-}
-
-# Only filter if there's a filter file and we don't see a known interactive command.
-# Obviously this is super ad hoc but I don't know how to improve on it. Testing whether
-# stdin is a terminal is useless because most of my use cases for this filtering are
-# exactly when I'm at a terminal, running sbt non-interactively.
-shouldFilter () { [[ -f ~/.sbtignore ]] && ! egrep -q '\b(shell|console|consoleProject)\b' <<<"${residual_args[@]}"; }
-
-# run sbt
-if shouldFilter; then mainFiltered; else main; fi

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/geode-spark-connector/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/geode-spark-connector/scalastyle-config.xml b/geode-spark-connector/scalastyle-config.xml
deleted file mode 100644
index fcbfc0e..0000000
--- a/geode-spark-connector/scalastyle-config.xml
+++ /dev/null
@@ -1,117 +0,0 @@
-<scalastyle>
- <name>Scalastyle standard configuration</name>
- <check level="warning" class="org.scalastyle.file.FileTabChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.file.FileLengthChecker" enabled="true">
-  <parameters>
-   <parameter name="maxFileLength"><![CDATA[800]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.file.HeaderMatchesChecker" enabled="false">
-  <parameters>
-   <parameter name="header"><![CDATA[// Copyright (C) 2011-2012 the original author or authors.
-// See the LICENCE.txt file distributed with this work for additional
-// information regarding copyright ownership.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.SpacesAfterPlusChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.file.WhitespaceEndOfLineChecker" enabled="false"></check>
- <check level="warning" class="org.scalastyle.scalariform.SpacesBeforePlusChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.file.FileLineLengthChecker" enabled="true">
-  <parameters>
-   <parameter name="maxLineLength"><![CDATA[160]]></parameter>
-   <parameter name="tabSize"><![CDATA[4]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.ClassNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.ObjectNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.PackageObjectNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[^[a-z][A-Za-z]*$]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.EqualsHashCodeChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.IllegalImportsChecker" enabled="true">
-  <parameters>
-   <parameter name="illegalImports"><![CDATA[sun._,java.awt._]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.ParameterNumberChecker" enabled="true">
-  <parameters>
-   <parameter name="maxParameters"><![CDATA[8]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.MagicNumberChecker" enabled="true">
-  <parameters>
-   <parameter name="ignore"><![CDATA[-1,0,1,2,3]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.NoWhitespaceBeforeLeftBracketChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.NoWhitespaceAfterLeftBracketChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.ReturnChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.NullChecker" enabled="false"></check>
- <check level="warning" class="org.scalastyle.scalariform.NoCloneChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.NoFinalizeChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.CovariantEqualsChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.StructuralTypeChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.file.RegexChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[println]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.NumberOfTypesChecker" enabled="true">
-  <parameters>
-   <parameter name="maxTypes"><![CDATA[30]]></parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.CyclomaticComplexityChecker" enabled="true">
-  <parameters>
-   <parameter name="maximum"><![CDATA[10]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.UppercaseLChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.SimplifyBooleanExpressionChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.scalariform.IfBraceChecker" enabled="false">
-  <parameters>
-   <parameter name="singleLineAllowed"><![CDATA[true]]></parameter>
-   <parameter name="doubleLineAllowed"><![CDATA[false]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.MethodLengthChecker" enabled="true">
-  <parameters>
-   <parameter name="maxLength"><![CDATA[50]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.MethodNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[^[a-z][A-Za-z0-9]*$]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.NumberOfMethodsInTypeChecker" enabled="true">
-  <parameters>
-   <parameter name="maxMethods"><![CDATA[30]]></parameter>
-  </parameters>
- </check>
- <check level="warning" class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker" enabled="true"></check>
- <check level="warning" class="org.scalastyle.file.NewLineAtEofChecker" enabled="false"></check>
- <check level="warning" class="org.scalastyle.file.NoNewLineAtEofChecker" enabled="false"></check>
-</scalastyle>

http://git-wip-us.apache.org/repos/asf/geode/blob/3bc0a16d/gradle/rat.gradle
----------------------------------------------------------------------
diff --git a/gradle/rat.gradle b/gradle/rat.gradle
index f8018b6..10df53f 100644
--- a/gradle/rat.gradle
+++ b/gradle/rat.gradle
@@ -40,10 +40,6 @@ rat {
     '**/build-*/**',
     '.buildinfo',
 
-    // SBT
-    'geode-spark-connector/**/target/**',
-    'geode-spark-connector/sbt',
-
     // Geode examples
     'geode-examples/.idea/**',
     'geode-examples/gradlew*/**',
@@ -61,7 +57,6 @@ rat {
     '**/*.ipr',
     '**/*.iws',
     '.idea/**',
-    'geode-spark-connector/.idea/**',
     '**/tags',
     '**/out/**',
 
@@ -102,8 +97,6 @@ rat {
     'geode-book/final_app/**',
 
     // other text files
-    'geode-spark-connector/project/plugins.sbt',
-    'geode-spark-connector/project/build.properties',
     '**/log4j*.xml',
 
     // modules

