carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [3/5] incubator-carbondata git commit: make carbon compilable with -Pspark-2.0
Date Fri, 25 Nov 2016 09:43:20 GMT
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/resources/dimSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/dimSample.csv b/examples/spark/src/main/resources/dimSample.csv
new file mode 100644
index 0000000..0c8f27a
--- /dev/null
+++ b/examples/spark/src/main/resources/dimSample.csv
@@ -0,0 +1,21 @@
+id,name,city
+1,David,Beijing
+2,Mark,Paris
+3,Bill,NewYork
+4,Sara,Tokyo
+5,John,Beijing
+6,Michel,Chicago
+7,Robert,Houston
+8,Sunny,Boston
+9,Mary,Tokyo
+10,Edward,Paris
+11,James,Washington
+12,Maria,Berlin
+13,Adam,Athens
+14,Peter,Boston
+15,George,Paris
+16,Paul,Shanghai
+17,Lisa,Hangzhou
+18,Angel,Beijing
+19,Emily,Bangalore
+20,Kevin,Singapore
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/resources/factSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/factSample.csv b/examples/spark/src/main/resources/factSample.csv
new file mode 100644
index 0000000..9693156
--- /dev/null
+++ b/examples/spark/src/main/resources/factSample.csv
@@ -0,0 +1,51 @@
+id,name,city,salary
+1,David,Beijing,15000
+1,David,Tokyo,20000
+1,David,Hangzhou,18000
+2,Mark,Paris,12000
+2,Mark,Boston,15000
+2,Mark,Chicago,18000
+3,Bill,NewYork,20000
+3,Bill,Boston,23000
+4,Sara,Tokyo,11000
+4,Sara,Paris,15000
+4,Sara,Chicago,21000
+4,Sara,Hangzhou,17000
+5,John,Beijing,15000
+5,John,Shanghai,16000
+6,Michel,Chicago,11000
+6,Michel,Boston,12000
+6,Michel,Tokyo,11000
+8,Sunny,Boston,14000
+8,Sunny,Beijing,22000
+8,Sunny,Tokyo,20000
+9,Mary,Tokyo,13000
+9,Mary,NewYork,18000
+9,Mary,Paris,16000
+9,Mary,Washington,20000
+9,Mary,Boston,17000
+10,Edward,Paris,20000
+10,Edward,Beijing,12000
+10,Edward,Berlin,15000
+11,James,Washington,16000
+12,Maria,Berlin,15000
+12,Maria,Beijing,16000
+13,Adam,Athens,21000
+13,Adam,Berlin,18000
+13,Adam,Hangzhou,17000
+14,Peter,Boston,20000
+14,Peter,Berlin,21000
+14,Peter,Shanghai,18000
+15,George,Paris,17000
+15,George,Tokyo,12000
+15,George,Beijing,15000
+15,George,Berlin,18000
+16,Paul,Shanghai,22000
+16,Paul,Tokyo,19000
+16,Paul,Paris,24000
+16,Paul,Hangzhou,22000
+18,Angel,Beijing,22000
+18,Angel,NewYork,25000
+18,Angel,Tokyo,22000
+20,Kevin,Singapore,18000
+20,Kevin,Bangalore,16000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
new file mode 100644
index 0000000..9fecadb
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples.util.{AllDictionaryUtil, ExampleUtils}
+
+object AllDictionaryExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("AllDictionaryExample")
+    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
+    val csvHeader = "ID,date,country,name,phonetype,serialname,salary"
+    val dictCol = "|date|country|name|phonetype|serialname|"
+    val allDictFile = ExampleUtils.currentPath + "/src/main/resources/data.dictionary"
+    // extract the all-dictionary file from the source data
+    AllDictionaryUtil.extractDictionary(cc.sparkContext,
+      testData, allDictFile, csvHeader, dictCol)
+    // Specify timestamp format based on raw data
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+
+    cc.sql("""
+           CREATE TABLE IF NOT EXISTS t3
+           (ID Int, date Timestamp, country String,
+           name String, phonetype String, serialname String, salary Int)
+           STORED BY 'carbondata'
+           """)
+
+    cc.sql(s"""
+           LOAD DATA LOCAL INPATH '$testData' into table t3
+           options('ALL_DICTIONARY_PATH'='$allDictFile')
+           """)
+
+    cc.sql("""
+           SELECT * FROM t3
+           """).show()
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+
+    // clean local dictionary files
+    AllDictionaryUtil.cleanDictionary(allDictFile)
+  }
+
+}
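
The dictCol string above is matched per CSV header column by AllDictionaryUtil
(included later in this patch): a column is treated as a dictionary column when
its lower-cased name, wrapped in pipes, occurs in dictCol. A minimal sketch of
that matching, using the values from this example:

    val csvHeader = "ID,date,country,name,phonetype,serialname,salary"
    val dictCol = "|date|country|name|phonetype|serialname|"
    val isDictCol = csvHeader.split(",").map { col =>
      dictCol.contains("|" + col.toLowerCase + "|")
    }
    // isDictCol: Array(false, true, true, true, true, true, false)
    // so ID and salary are not dictionary columns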

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
new file mode 100644
index 0000000..17fe960
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastorage.store.impl.FileFactory
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples.util.ExampleUtils
+
+/**
+ * To configure Alluxio:
+ * 1. Start Alluxio.
+ * 2. Upload the jar: "/alluxio_path/core/client/target/
+ *    alluxio-core-client-YOUR-VERSION-jar-with-dependencies.jar"
+ * 3. See http://www.alluxio.org/docs/master/en/Running-Spark-on-Alluxio.html for more detail.
+ */
+
+object AlluxioExample {
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("AlluxioExample")
+    cc.sparkContext.hadoopConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
+    FileFactory.getConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
+
+    // Specify timestamp format based on raw data
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+
+    cc.sql("""
+           CREATE TABLE IF NOT EXISTS t3
+           (ID Int, date Timestamp, country String,
+           name String, phonetype String, serialname String, salary Int)
+           STORED BY 'carbondata'
+           """)
+
+    cc.sql(s"""
+           LOAD DATA LOCAL INPATH 'alluxio://localhost:19998/data.csv' into table t3
+           """)
+
+    cc.sql("""
+           SELECT country, count(salary) AS amount
+           FROM t3
+           WHERE country IN ('china','france')
+           GROUP BY country
+           """).show()
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+  }
+}
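
For step 2 of the comment above, the Alluxio client jar needs to be on the
driver and executor classpaths. A minimal sketch, assuming the jar path named
in the comment and the standard Spark extraClassPath properties (see the
linked Alluxio guide for details):

    import org.apache.spark.SparkConf

    val alluxioClientJar = "/alluxio_path/core/client/target/" +
      "alluxio-core-client-YOUR-VERSION-jar-with-dependencies.jar"
    val conf = new SparkConf()
      .setAppName("AlluxioExample")
      .set("spark.driver.extraClassPath", alluxioClientJar)
      .set("spark.executor.extraClassPath", alluxioClientJar)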

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
new file mode 100644
index 0000000..f98d46d
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples.util.ExampleUtils
+
+object CarbonExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("CarbonExample")
+    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
+
+    // Specify timestamp format based on raw data
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+
+    // Create table, 6 dimensions, 1 measure
+    cc.sql("""
+           CREATE TABLE IF NOT EXISTS t3
+           (ID Int, date Timestamp, country String,
+           name String, phonetype String, serialname String, salary Int)
+           STORED BY 'carbondata'
+           """)
+
+    // Currently there are two data loading flows in CarbonData: one uses Kettle as the ETL tool
+    // on each node, and the other uses a multi-thread framework without Kettle (see
+    // AbstractDataLoadProcessorStep).
+    // Load data with Kettle
+    cc.sql(s"""
+           LOAD DATA LOCAL INPATH '$testData' into table t3
+           """)
+
+    // Perform a query
+    cc.sql("""
+           SELECT country, count(salary) AS amount
+           FROM t3
+           WHERE country IN ('china','france')
+           GROUP BY country
+           """).show()
+
+    // Load data without Kettle
+    cc.sql(s"""
+           LOAD DATA LOCAL INPATH '$testData' into table t3
+           OPTIONS('USE_KETTLE'='false')
+           """)
+
+    // Perform a query
+    cc.sql("""
+           SELECT country, count(salary) AS amount
+           FROM t3
+           WHERE country IN ('china','france')
+           GROUP BY country
+           """).show()
+
+    // Drop table
+    cc.sql("DROP TABLE IF EXISTS t3")
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
new file mode 100644
index 0000000..21bd002
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, SaveMode}
+
+import org.apache.carbondata.examples.util.ExampleUtils
+
+case class People(name: String, occupation: String, id: Int)
+
+object CaseClassDataFrameAPIExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("CaseClassDataFrameAPIExample")
+    import cc.implicits._
+
+    val people = List(People("sangeeta", "engineer", 1), People("pallavi", "consultant", 2))
+    val peopleRDD: RDD[People] = cc.sc.parallelize(people)
+    val peopleDF: DataFrame = peopleRDD.toDF("name", "occupation", "id")
+
+    // writing data to carbon table
+    peopleDF.write
+      .format("carbondata")
+      .option("tableName", "carbon2")
+      .option("compress", "true")
+      .mode(SaveMode.Overwrite)
+      .save()
+
+    cc.sql("SELECT * FROM carbon2").show()
+
+    cc.sql("DROP TABLE IF EXISTS carbon2")
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
new file mode 100644
index 0000000..992c3f9
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.examples.util.ExampleUtils
+
+/**
+ * Carbon supports the complex types ARRAY and STRUCT.
+ * Complex type columns can be used in all SQL clauses.
+ */
+object ComplexTypeExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("ComplexTypeExample")
+    val dataPath = ExampleUtils.currentPath + "/src/main/resources/complexdata.csv"
+    val tableName = "complexTypeTable"
+
+    cc.sql(s"DROP TABLE IF EXISTS $tableName")
+    cc.sql(s"""CREATE TABLE $tableName (
+                 deviceInformationId int,
+                 channelsId string,
+                 ROMSize string,
+                 purchasedate string,
+                 mobile struct<imei:string,
+                              imsi:string>,
+                 MAC array<string>,
+                 locationinfo array<struct<ActiveAreaId:int,
+                                           ActiveCountry:string,
+                                           ActiveProvince:string,
+                                           Activecity:string,
+                                           ActiveDistrict:string,
+                                           ActiveStreet:string>>,
+                  proddate struct<productionDate: string,
+                                 activeDeactivedate: array<string>>,
+                  gamePointId double,
+                  contractNumber double)
+              STORED BY 'org.apache.carbondata.format' """)
+
+    cc.sql(s"load data local inpath '$dataPath' into table $tableName " +
+      "options ('COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')")
+
+    // filter on complex ARRAY type with index filter
+    cc.sql(s"SELECT mobile, proddate.activeDeactivedate, MAC[0] FROM $tableName " +
+      "WHERE MAC[0] LIKE 'MAC1%'").show
+
+    // filter on complex STRUCT type
+    cc.sql(s"SELECT mobile, proddate.activeDeactivedate FROM $tableName " +
+      "WHERE mobile.imei = '1AA1' or mobile.imsi = ''").show
+
+    // filter on complex STRUCT<ARRAY>
+    cc.sql(s"SELECT mobile, proddate.activeDeactivedate[0] FROM $tableName " +
+      "WHERE proddate.activeDeactivedate[0] = '29-11-2015'").show
+
+    // filter on complex ARRAY<STRUCT>
+    cc.sql(s"SELECT mobile, locationinfo[0] FROM $tableName " +
+      "WHERE locationinfo[0].ActiveCountry = 'Chinese'").show
+
+    // complex type aggregation and group by complex type
+    cc.sql(s"SELECT mobile, count(proddate) FROM $tableName GROUP BY mobile").show
+
+    cc.sql(s"DROP TABLE IF EXISTS $tableName")
+  }
+}
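
To illustrate the COMPLEX_DELIMITER options used in the load above: the
level-1 delimiter ('$') splits the outer complex value and the level-2
delimiter (':') splits the values nested one level deeper. From the
complexdata.csv row shown at the end of this mail:

    mobile (struct<imei,imsi>):            1AA1$2BB1
    MAC (array<string>):                   MAC1$MAC2$MAC3
    locationinfo (array<struct<...>>):     7:Chinese:Hubei Province:yichang:...$7:India:New Delhi:delhi:...
    proddate (struct<string,array>):       29-11-2015$29-11-2015:29-11-2015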

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
new file mode 100644
index 0000000..49fb0da
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.examples.util.ExampleUtils
+
+// scalastyle:off println
+object DataFrameAPIExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("DataFrameAPIExample")
+    ExampleUtils.writeSampleCarbonFile(cc, "carbon1")
+
+    // use datasource api to read
+    val in = cc.read
+      .format("carbondata")
+      .option("tableName", "carbon1")
+      .load()
+
+    import cc.implicits._
+    var count = in.where($"c3" > 500).select($"*").count()
+    println(s"count after 1 load: $count")
+
+    // append new data, query answer should be 1000
+    ExampleUtils.appendSampleCarbonFile(cc, "carbon1")
+    count = in.where($"c3" > 500).select($"*").count()
+    println(s"count after 2 load: $count")
+
+    // use SQL to read
+    cc.sql("SELECT count(*) FROM carbon1 WHERE c3 > 500").show
+    cc.sql("DROP TABLE IF EXISTS carbon1")
+  }
+}
+// scalastyle:on println
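
The counts printed above follow from ExampleUtils.writeSampleCarbonFile
(included later in this patch), which writes rows ("a", "b", x) for x in
1 to 1000, so c3 > 500 matches 500 rows per load; a sketch of the arithmetic:

    val rows = (1 to 1000).map(x => ("a", "b", x))
    val matching = rows.count(_._3 > 500)
    // matching == 500 per load, hence 500 after one load and 1000 after two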

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
new file mode 100644
index 0000000..4552e06
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.examples.util.ExampleUtils
+
+object DataManagementExample {
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("DataManagementExample")
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+
+    // create a table using CarbonData
+    cc.sql(
+      """
+           CREATE TABLE IF NOT EXISTS t3
+           (ID Int, date Timestamp, country String,
+           name String, phonetype String, serialname String, salary Int)
+           STORED BY 'carbondata'
+      """
+    )
+
+    // data.csv has 1000 lines
+    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
+
+    // load data 5 times; each load creates a new segment in CarbonData
+    (1 to 5).foreach { i =>
+      cc.sql(s"LOAD DATA LOCAL INPATH '$testData' into table t3")
+    }
+    cc.sql("SHOW SEGMENTS FOR TABLE t3 ").show
+
+    // delete the first segment
+    cc.sql("DELETE SEGMENT 0 FROM TABLE t3")
+    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
+
+    // this query will be executed on the remaining 4 segments and should return 4000 rows
+    cc.sql("SELECT count(*) AS amount FROM t3").show
+
+    // force a major compaction to compact all segments into one
+    cc.sql("ALTER TABLE t3 COMPACT 'MAJOR' ")
+    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
+
+    // load again, add another 1000 rows
+    cc.sql(s"LOAD DATA LOCAL INPATH '$testData' into table t3")
+    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
+
+    // this query will be executed on 2 segments and should return 5000 rows
+    cc.sql("SELECT count(*) AS amount FROM t3").show
+
+    // delete all segments whose loading time is before '2099-01-01 01:00:00'
+    cc.sql("DELETE SEGMENTS FROM TABLE t3 WHERE STARTTIME BEFORE '2099-01-01 01:00:00'")
+    cc.sql("SHOW SEGMENTS FOR TABLE t3 ").show
+
+    // this query will be executed on 0 segments and should return 0 rows
+    cc.sql("SELECT count(*) AS amount FROM t3").show
+
+    // force clean up all 'MARKED_FOR_DELETE' and 'COMPACTED' segments immediately
+    cc.sql("CLEAN FILES FOR TABLE t3")
+    cc.sql("SHOW SEGMENTS FOR TABLE t3").show
+
+    cc.sql("DROP TABLE IF EXISTS t3")
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
new file mode 100644
index 0000000..791a126
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.spark.sql.{SaveMode, SQLContext}
+
+import org.apache.carbondata.examples.util.ExampleUtils
+
+object DatasourceExample {
+
+  def main(args: Array[String]) {
+    // use CarbonContext to write CarbonData files
+    val cc = ExampleUtils.createCarbonContext("DatasourceExample")
+    ExampleUtils.writeSampleCarbonFile(cc, "table1")
+
+    // Use SQLContext to read CarbonData files
+    val sqlContext = new SQLContext(cc.sparkContext)
+    sqlContext.sql(
+      s"""
+        | CREATE TEMPORARY TABLE source
+        | USING org.apache.spark.sql.CarbonSource
+        | OPTIONS (path '${cc.storePath}/default/table1')
+      """.stripMargin)
+    sqlContext.sql("SELECT c1, c2, count(*) FROM source WHERE c3 > 100 GROUP BY c1, c2").show
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
new file mode 100644
index 0000000..2553e8c
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.spark.sql.SQLContext
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.examples.util.ExampleUtils
+
+/**
+ * This example requires Spark 1.6 or later.
+ */
+object DirectSQLExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("DirectSQLExample")
+    ExampleUtils.writeSampleCarbonFile(cc, "table1")
+
+    // Use SQLContext to read CarbonData files without creating table
+    val sqlContext = new SQLContext(cc.sparkContext)
+    sqlContext.sql(
+      s"""
+        | SELECT c1, c2, count(*)
+        | FROM carbondata.`${cc.storePath}/${CarbonCommonConstants.DATABASE_DEFAULT_NAME}/table1`
+        | WHERE c3 > 100
+        | GROUP BY c1, c2
+      """.stripMargin).show
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
new file mode 100644
index 0000000..2d7aed0
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.spark.sql.{CarbonContext, CarbonEnv, CarbonRelation}
+
+import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier
+import org.apache.carbondata.core.carbon.{CarbonTableIdentifier, ColumnIdentifier}
+import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
+import org.apache.carbondata.core.carbon.path.CarbonStorePath
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.examples.util.ExampleUtils
+import org.apache.carbondata.spark.load.CarbonLoaderUtil
+
+/**
+ * Example for global dictionary generation.
+ * Please check the files under target/store/default/dictSample/Metadata
+ * and verify the generated global dictionary values.
+ */
+object GenerateDictionaryExample {
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("GenerateDictionaryExample")
+    val factFilePath = ExampleUtils.currentPath + "/src/main/resources/factSample.csv"
+    val carbonTablePath = CarbonStorePath.getCarbonTablePath(ExampleUtils.storeLocation,
+      new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "dictSample", "1"))
+    val dictFolderPath = carbonTablePath.getMetadataDirectoryPath
+
+    // execute sql statement
+    cc.sql("DROP TABLE IF EXISTS dictSample")
+
+    cc.sql("""
+           CREATE TABLE IF NOT EXISTS dictSample(id Int, name String, city String, salary Int)
+           STORED BY 'org.apache.carbondata.format'
+           """)
+
+    cc.sql(s"""
+           LOAD DATA LOCAL INPATH '$factFilePath' INTO TABLE dictSample
+           """)
+
+    // check generated dictionary
+    val tableIdentifier =
+      new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "dictSample", "1")
+    printDictionary(cc, tableIdentifier, dictFolderPath)
+  }
+
+  def printDictionary(cc: CarbonContext, carbonTableIdentifier: CarbonTableIdentifier,
+                      dictFolderPath: String) {
+    val dataBaseName = carbonTableIdentifier.getDatabaseName
+    val tableName = carbonTableIdentifier.getTableName
+    val carbonRelation = CarbonEnv.getInstance(cc).carbonCatalog.
+      lookupRelation1(Option(dataBaseName),
+        tableName) (cc).asInstanceOf[CarbonRelation]
+    val carbonTable = carbonRelation.tableMeta.carbonTable
+    val dimensions = carbonTable.getDimensionByTableName(tableName.toLowerCase())
+      .toArray.map(_.asInstanceOf[CarbonDimension])
+    // scalastyle:off println
+    // print dictionary information
+    println("**********************************************************************************")
+    println(s"table:$tableName in " + s"database:$dataBaseName")
+    for (dimension <- dimensions) {
+      println("**********************************************************************************")
+      println(s"dictionary of dimension: ${dimension.getColName}")
+      println(s"Key\t\t\tValue")
+      val columnIdentifier = new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
+        dimension.getColumnIdentifier, dimension.getDataType)
+      val dict = CarbonLoaderUtil.getDictionary(columnIdentifier, cc.storePath)
+      var index: Int = 1
+      var distinctValue = dict.getDictionaryValueForKey(index)
+      while (distinctValue != null) {
+        println(index + s"\t\t\t" + distinctValue)
+        index += 1
+        distinctValue = dict.getDictionaryValueForKey(index)
+      }
+    }
+    println("**********************************************************************************")
+    // scalastyle:on println
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
new file mode 100644
index 0000000..329b3c9
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.examples.util.ExampleUtils
+import org.apache.carbondata.hadoop.CarbonInputFormat
+
+// scalastyle:off println
+object HadoopFileExample {
+
+  def main(args: Array[String]): Unit = {
+    val cc = ExampleUtils.createCarbonContext("HadoopFileExample")
+    ExampleUtils.writeSampleCarbonFile(cc, "carbon1")
+
+    val sc = cc.sparkContext
+    val input = sc.newAPIHadoopFile(s"${cc.storePath}/default/carbon1",
+      classOf[CarbonInputFormat[Array[Object]]],
+      classOf[Void],
+      classOf[Array[Object]])
+    val result = input.map(x => x._2.toList).collect
+    result.foreach(x => println(x.mkString(", ")))
+  }
+}
+// scalastyle:on println
+

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/PerfTest.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/PerfTest.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/PerfTest.scala
new file mode 100644
index 0000000..b1f6b24
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/PerfTest.scala
@@ -0,0 +1,330 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import java.io.File
+
+import scala.util.Random
+
+import org.apache.spark.sql.{CarbonContext, DataFrame, Row, SaveMode, SQLContext}
+import org.apache.spark.sql.types.{DataTypes, StructType}
+
+import org.apache.carbondata.examples.PerfTest._
+import org.apache.carbondata.examples.util.ExampleUtils
+
+// scalastyle:off println
+
+/**
+ * Represents one query.
+ */
+class Query(val queryType: String, val queryNo: Int, val sqlString: String) {
+
+  /**
+   * run the query in a batch and calculate average time
+   *
+   * @param sqlContext context to run the query
+   * @param runs number of times to run the query
+   * @param datasource datasource to run against
+   */
+  def run(sqlContext: SQLContext, runs: Int, datasource: String): QueryResult = {
+    // run repeatedly and calculate the average elapsed time; the first run is
+    // measured separately and the average is taken over the remaining runs,
+    // so at least 2 runs are required
+    require(runs >= 2)
+    val sqlToRun = makeSQLString(datasource)
+
+    val firstTime = withTime {
+      sqlContext.sql(sqlToRun).collect
+    }
+
+    var totalTime: Long = 0
+    var result: Array[Row] = null
+    (1 to (runs - 1)).foreach { x =>
+      totalTime += withTime {
+        result = sqlContext.sql(sqlToRun).collect
+      }
+    }
+
+    val avgTime = totalTime / (runs - 1)
+    QueryResult(datasource, result, avgTime, firstTime)
+  }
+
+  private def makeSQLString(datasource: String): String = {
+    sqlString.replaceFirst("tableName", PerfTest.makeTableName(datasource))
+  }
+
+}
+
+/**
+ * query performance result
+ */
+case class QueryResult(datasource: String, result: Array[Row], avgTime: Long, firstTime: Long)
+
+class QueryRunner(sqlContext: SQLContext, dataFrame: DataFrame, datasources: Seq[String]) {
+
+  /**
+   * run a query on each datasource
+   */
+  def run(query: Query, runs: Int): Seq[QueryResult] = {
+    var results = Seq[QueryResult]()
+    datasources.foreach { datasource =>
+      val result = query.run(sqlContext, runs, datasource)
+      results :+= result
+    }
+    checkResult(results)
+    results
+  }
+
+  private def checkResult(results: Seq[QueryResult]): Unit = {
+    results.foldLeft(results.head) { (last, cur) =>
+      if (last.result.sortBy(_.toString()).sameElements(cur.result.sortBy(_.toString()))) cur
+      else sys.error(s"result is not the same between " +
+          s"${last.datasource} and " +
+          s"${cur.datasource}")
+    }
+  }
+
+  private def loadToNative(datasource: String): Unit = {
+    val savePath = PerfTest.savePath(datasource)
+    println(s"loading data into $datasource, path: $savePath")
+    dataFrame.write
+        .mode(SaveMode.Overwrite)
+        .format(datasource)
+        .save(savePath)
+    sqlContext.read
+        .format(datasource)
+        .load(savePath)
+        .registerTempTable(PerfTest.makeTableName(datasource))
+  }
+
+  /**
+   * load data to each datasource
+   */
+  def loadData: Seq[QueryResult] = {
+    // load data into all datasources
+    var results = Seq[QueryResult]()
+    datasources.foreach { datasource =>
+      val time = withTime {
+        datasource match {
+          case "parquet" =>
+            dataFrame.sqlContext.setConf(s"spark.sql.$datasource.compression.codec", "snappy")
+            loadToNative(datasource)
+          case "orc" =>
+            dataFrame.sqlContext.sparkContext.hadoopConfiguration.set("orc.compress", "SNAPPY")
+            loadToNative(datasource)
+          case "carbon" =>
+            sqlContext.sql(s"DROP TABLE IF EXISTS ${PerfTest.makeTableName(datasource)}")
+            println(s"loading data into $datasource, path: " +
+                s"${dataFrame.sqlContext.asInstanceOf[CarbonContext].storePath}")
+            dataFrame.write
+                .format("org.apache.spark.sql.CarbonSource")
+                .option("tableName", PerfTest.makeTableName(datasource))
+                .mode(SaveMode.Overwrite)
+                .save()
+          case _ => sys.error("unsupported data source")
+        }
+      }
+      println(s"load data into $datasource completed, time taken ${time/1000000}ms")
+      results :+= QueryResult(datasource, null, time, time)
+    }
+    results
+  }
+
+  def shutDown(): Unit = {
+    // drop all tables and temp files
+    datasources.foreach { datasource =>
+      datasource match {
+        case "parquet" | "orc" =>
+          // File.delete does not remove non-empty directories, so delete recursively
+          def deleteRecursively(file: File): Unit = {
+            if (file.isDirectory) file.listFiles().foreach(deleteRecursively)
+            file.delete()
+          }
+          deleteRecursively(new File(PerfTest.savePath(datasource)))
+        case "carbon" =>
+          sqlContext.sql(s"DROP TABLE IF EXISTS ${PerfTest.makeTableName("carbon")}")
+        case _ => sys.error("unsupported data source")
+      }
+    }
+  }
+}
+
+/**
+ * template for table data generation
+ *
+ * @param dimension number of dimension columns and their cardinality
+ * @param measure number of measure columns
+ */
+case class TableTemplate(dimension: Seq[(Int, Int)], measure: Int)
+
+/**
+ * utility to generate random data according to template
+ */
+class TableGenerator(sqlContext: SQLContext) {
+
+  /**
+   * generate a dataframe from random data
+   */
+  def genDataFrame(template: TableTemplate, rows: Int): DataFrame = {
+    val measures = template.measure
+    val dimensions = template.dimension.foldLeft(0) {(x, y) => x + y._1}
+    val cardinality = template.dimension.foldLeft(Seq[Int]()) {(x, y) =>
+      x ++ (1 to y._1).map(z => y._2)
+    }
+    print(s"generating data: $rows rows of $dimensions dimensions and $measures measures. ")
+    println("cardinality for each dimension: " + cardinality.mkString(", "))
+
+    val dimensionFields = (1 to dimensions).map { id =>
+      DataTypes.createStructField(s"c$id", DataTypes.StringType, false)
+    }
+    val measureFields = (dimensions + 1 to dimensions + measures).map { id =>
+      DataTypes.createStructField(s"c$id", DataTypes.IntegerType, false)
+    }
+    val schema = StructType(dimensionFields ++ measureFields)
+    val data = sqlContext.sparkContext.parallelize(1 to rows).map { x =>
+      val random = new Random()
+      val dimSeq = (1 to dimensions).map { y =>
+        s"P${y}_${random.nextInt(cardinality(y - 1))}"
+      }
+      val msrSeq = (1 to measures).map { y =>
+        random.nextInt(10)
+      }
+      Row.fromSeq(dimSeq ++ msrSeq)
+    }
+    val df = sqlContext.createDataFrame(data, schema)
+    df.write.mode(SaveMode.Overwrite).parquet(PerfTest.savePath("temp"))
+    sqlContext.parquetFile(PerfTest.savePath("temp"))
+  }
+}
+
+object PerfTest {
+
+  private val olap: Seq[String] = Seq(
+    """SELECT c3, c4, sum(c8) FROM tableName
+      |WHERE c1 = 'P1_23' and c2 = 'P2_43'
+      |GROUP BY c3, c4""".stripMargin,
+
+    """SELECT c2, c3, sum(c9) FROM tableName
+      |WHERE c1 = 'P1_432' and c4 = 'P4_3' and c5 = 'P5_2'
+      |GROUP by c2, c3 """.stripMargin,
+
+    """SELECT c2, count(distinct c1), sum(c8) FROM tableName
+      |WHERE c3="P3_4" and c5="P5_4"
+      |GROUP BY c2 """.stripMargin,
+
+    """SELECT c2, c5, count(distinct c1), sum(c7) FROM tableName
+      |WHERE c4="P4_4" and c5="P5_7" and c8>4
+      |GROUP BY c2, c5 """.stripMargin
+  )
+
+  private val point: Seq[String] = Seq(
+    """SELECT c4 FROM tableName
+      |WHERE c1="P1_43" """.stripMargin,
+
+    """SELECT c3 FROM tableName
+      |WHERE c1="P1_542" and c2="P2_23" """.stripMargin,
+
+    """SELECT c3, c5 FROM tableName
+      |WHERE c1="P1_52" and c7=4""".stripMargin,
+
+    """SELECT c4, c9 FROM tableName
+      |WHERE c1="P1_43" and c8<3""".stripMargin
+  )
+
+  private val filter: Seq[String] = Seq(
+    """SELECT * FROM tableName
+      |WHERE c2="P2_43" """.stripMargin,
+
+    """SELECT * FROM tableName
+      |WHERE c3="P3_3"  """.stripMargin,
+
+    """SELECT * FROM tableName
+      |WHERE c2="P2_32" and c3="P3_23" """.stripMargin,
+
+    """SELECT * FROM tableName
+      |WHERE c3="P3_28" and c4="P4_3" """.stripMargin
+  )
+
+  private val scan: Seq[String] = Seq(
+    """SELECT sum(c7), sum(c8), avg(c9), max(c10) FROM tableName """.stripMargin,
+
+    """SELECT sum(c7) FROM tableName
+      |WHERE c2="P2_32" """.stripMargin,
+
+    """SELECT sum(c7), sum(c8), sum(c9), sum(c10) FROM tableName
+      |WHERE c4="P4_4" """.stripMargin,
+
+    """SELECT sum(c7), sum(c8), sum(c9), sum(c10) FROM tableName
+      |WHERE c2="P2_75" and c6<5 """.stripMargin
+  )
+
+  def main(args: Array[String]) {
+    val cc = ExampleUtils.createCarbonContext("PerfTest")
+
+    // prepare performance queries
+    var workload = Seq[Query]()
+    olap.zipWithIndex.foreach(x => workload :+= new Query("OLAP Query", x._2, x._1))
+    point.zipWithIndex.foreach(x => workload :+= new Query("Point Query", x._2, x._1))
+    filter.zipWithIndex.foreach(x => workload :+= new Query("Filter Query", x._2, x._1))
+    scan.zipWithIndex.foreach(x => workload :+= new Query("Scan Query", x._2, x._1))
+
+    // prepare data
+    val rows = 3 * 1000 * 1000
+    val dimension = Seq((1, 1 * 1000), (1, 100), (1, 50), (2, 10)) // cardinality for each column
+    val measure = 5 // number of measures
+    val template = TableTemplate(dimension, measure)
+    val df = new TableGenerator(cc).genDataFrame(template, rows)
+    println("generate data completed")
+
+    // run all queries against all data sources
+    val datasource = Seq("parquet", "orc", "carbon")
+    val runner = new QueryRunner(cc, df, datasource)
+
+    val results = runner.loadData
+    println(s"load performance: ${results.map(_.avgTime / 1000000L).mkString(", ")}")
+
+    var parquetTime: Double = 0
+    var orcTime: Double = 0
+    var carbonTime: Double = 0
+
+    println(s"query id: ${datasource.mkString(", ")}, results in milliseconds")
+    workload.foreach { query =>
+      // run each query 4 times; print the first-run time and the average of the last 3 runs
+      print(s"${query.queryType} ${query.queryNo}: ")
+      val results = runner.run(query, 4)
+      print(s"${results.map(_.avgTime / 1000000L).mkString(", ")} ")
+      println(s"[sql: ${query.sqlString.replace('\n', ' ')}]")
+      parquetTime += results(0).avgTime
+      orcTime += results(1).avgTime
+      carbonTime += results(2).avgTime
+    }
+
+    println(s"Total time: ${parquetTime / 1000000}, ${orcTime / 1000000}, " +
+        s"${carbonTime / 1000000} = 1 : ${parquetTime / orcTime} : ${parquetTime / carbonTime}")
+    runner.shutDown()
+  }
+
+  def makeTableName(datasource: String): String = {
+    s"${datasource}_perftest_table"
+  }
+
+  def savePath(datasource: String): String =
+      s"${ExampleUtils.currentPath}/target/perftest/${datasource}"
+
+  def withTime(body: => Unit): Long = {
+    val start = System.nanoTime()
+    body
+    System.nanoTime() - start
+  }
+
+}
+// scalastyle:on println
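
For reference, each (count, cardinality) pair in the dimension template above
contributes `count` string columns with that cardinality, so
Seq((1, 1000), (1, 100), (1, 50), (2, 10)) yields dimensions c1..c5 with
cardinalities 1000, 100, 50, 10, 10, followed by measures c6..c10. A sketch of
the expansion performed in TableGenerator.genDataFrame:

    val dimension = Seq((1, 1000), (1, 100), (1, 50), (2, 10))
    val cardinality = dimension.foldLeft(Seq[Int]()) { (acc, d) =>
      acc ++ Seq.fill(d._1)(d._2)
    }
    // cardinality: List(1000, 100, 50, 10, 10)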

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/util/AllDictionaryUtil.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/util/AllDictionaryUtil.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/AllDictionaryUtil.scala
new file mode 100644
index 0000000..6011bcb
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/AllDictionaryUtil.scala
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples.util
+
+import java.io.DataOutputStream
+
+import scala.collection.mutable.{ArrayBuffer, HashSet}
+
+import org.apache.spark.SparkContext
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.core.datastorage.store.impl.FileFactory
+
+object AllDictionaryUtil {
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+  def extractDictionary(sc: SparkContext,
+                        srcData: String,
+                        outputPath: String,
+                        fileHeader: String,
+                        dictCol: String): Unit = {
+    val fileHeaderArr = fileHeader.split(",")
+    val isDictCol = new Array[Boolean](fileHeaderArr.length)
+    for (i <- 0 until fileHeaderArr.length) {
+      isDictCol(i) = dictCol.contains("|" + fileHeaderArr(i).toLowerCase() + "|")
+    }
+    val dictionaryRdd = sc.textFile(srcData).flatMap(x => {
+      val tokens = x.split(",")
+      val result = new ArrayBuffer[(Int, String)]()
+      for (i <- 0 until isDictCol.length) {
+        if (isDictCol(i)) {
+          try {
+            result += ((i, tokens(i)))
+          } catch {
+            case ex: ArrayIndexOutOfBoundsException =>
+              LOGGER.error("Read a bad record: " + x)
+          }
+        }
+      }
+      result
+    }).groupByKey().flatMap(x => {
+      val distinctValues = new HashSet[(Int, String)]()
+      for (value <- x._2) {
+        distinctValues.add((x._1, value))
+      }
+      distinctValues
+    })
+    val dictionaryValues = dictionaryRdd.map(x => x._1 + "," + x._2).collect()
+    saveToFile(dictionaryValues, outputPath)
+  }
+
+  def cleanDictionary(outputPath: String): Unit = {
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (file.exists()) {
+        file.delete()
+      }
+    } catch {
+      case ex: Exception =>
+        LOGGER.error("Clean dictionary catching exception:" + ex)
+    }
+  }
+
+  def saveToFile(contents: Array[String], outputPath: String): Unit = {
+    var writer: DataOutputStream = null
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (!file.exists()) {
+        file.createNewFile()
+      }
+      writer = FileFactory.getDataOutputStream(outputPath, fileType)
+      for (content <- contents) {
+        writer.writeBytes(content + "\n")
+      }
+    } catch {
+      case ex: Exception =>
+        LOGGER.error("Save dictionary to file catching exception:" + ex)
+    } finally {
+      if (writer != null) {
+        try {
+          writer.close()
+        } catch {
+          case ex: Exception =>
+            LOGGER.error("Close output stream catching exception:" + ex)
+        }
+      }
+    }
+  }
+}
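
The all-dictionary file written by saveToFile contains one line per distinct
value, in the form "<columnIndex>,<value>" (see the dictionaryRdd.map step
above). For the "ID,date,country,name,phonetype,serialname,salary" header used
in AllDictionaryExample, country is column index 2, so hypothetical lines could
look like:

    2,china
    2,france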

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
new file mode 100644
index 0000000..82387a4
--- /dev/null
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples.util
+
+import java.io.File
+
+import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.sql.{CarbonContext, SaveMode}
+
+import org.apache.carbondata.core.util.CarbonProperties
+
+// scalastyle:off println
+
+object ExampleUtils {
+
+  def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
+      .getCanonicalPath
+  val storeLocation = currentPath + "/target/store"
+  val kettleHome = new File(currentPath + "/../../processing/carbonplugins").getCanonicalPath
+
+  def createCarbonContext(appName: String): CarbonContext = {
+    val sc = new SparkContext(new SparkConf()
+        .setAppName(appName)
+        .setMaster("local[2]"))
+    sc.setLogLevel("ERROR")
+
+    println(s"Starting $appName using spark version ${sc.version}")
+
+    val cc = new CarbonContext(sc, storeLocation, currentPath + "/target/carbonmetastore")
+    cc.setConf("carbon.kettle.home", kettleHome)
+
+    // whether to use table split partition
+    // true -> use table split partition, supporting multiple-partition loading
+    // false -> use node split partition, supporting data loading by host partition
+    CarbonProperties.getInstance().addProperty("carbon.table.split.partition.enable", "false")
+    cc
+  }
+
+  /**
+   * This function writes a sample CarbonData table with the following schema:
+   * c1: String, c2: String, c3: Int
+   */
+  def writeSampleCarbonFile(cc: CarbonContext, tableName: String, numRows: Int = 1000): Unit = {
+    cc.sql(s"DROP TABLE IF EXISTS $tableName")
+    writeDataframe(cc, tableName, numRows, SaveMode.Overwrite)
+  }
+
+  /**
+   * This function appends data to the CarbonData table
+   */
+  def appendSampleCarbonFile(cc: CarbonContext, tableName: String, numRows: Int = 1000): Unit = {
+    writeDataframe(cc, tableName, numRows, SaveMode.Append)
+  }
+
+  /**
+   * Creates a new dataframe and writes it to a CarbonData table, using the given save mode
+   */
+  private def writeDataframe(
+      cc: CarbonContext, tableName: String, numRows: Int, mode: SaveMode): Unit = {
+    // use CarbonContext to write CarbonData files
+    import cc.implicits._
+    val sc = cc.sparkContext
+    val df = sc.parallelize(1 to numRows, 2)
+        .map(x => ("a", "b", x))
+        .toDF("c1", "c2", "c3")
+
+    // save dataframe to carbon file
+    df.write
+        .format("carbondata")
+        .option("tableName", tableName)
+        .option("compress", "true")
+        .option("useKettle", "false")
+        .mode(mode)
+        .save()
+  }
+}
+// scalastyle:on println
+

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/spark2/pom.xml
----------------------------------------------------------------------
diff --git a/examples/spark2/pom.xml b/examples/spark2/pom.xml
new file mode 100644
index 0000000..bfb68d7
--- /dev/null
+++ b/examples/spark2/pom.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.carbondata</groupId>
+    <artifactId>carbondata-parent</artifactId>
+    <version>0.3.0-incubating-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>carbondata-examples-spark2</artifactId>
+  <name>Apache CarbonData :: Spark2 Examples</name>
+
+  <properties>
+    <dev.path>${basedir}/../../dev</dev.path>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-spark2</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <sourceDirectory>src/main/scala</sourceDirectory>
+    <resources>
+      <resource>
+        <directory>.</directory>
+        <includes>
+          <include>CARBON_EXAMPLESLogResource.properties</include>
+        </includes>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.scala-tools</groupId>
+        <artifactId>maven-scala-plugin</artifactId>
+        <version>2.15.2</version>
+        <executions>
+          <execution>
+            <id>compile</id>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <phase>compile</phase>
+          </execution>
+          <execution>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
\ No newline at end of file
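
Per the commit subject, this new module is meant to compile under the Spark 2 profile. A plausible invocation from the repository root (only the -Pspark-2.0 profile is confirmed by this commit; the remaining flags are standard Maven options):

    mvn clean install -Pspark-2.0 -DskipTests -pl examples/spark2 -am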

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f88737d4/examples/src/main/resources/complexdata.csv
----------------------------------------------------------------------
diff --git a/examples/src/main/resources/complexdata.csv b/examples/src/main/resources/complexdata.csv
deleted file mode 100644
index 23a3949..0000000
--- a/examples/src/main/resources/complexdata.csv
+++ /dev/null
@@ -1,101 +0,0 @@
-deviceInformationId,channelsId,ROMSize,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber
-1,109,4ROM size,29-11-2015,1AA1$2BB1,MAC1$MAC2$MAC3,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,29-11-2015$29-11-2015:29-11-2015,109,2738.562
-10,93,1ROM size,29-11-2015,1AA10$2BB10,MAC4$MAC5$MAC6,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,30-11-2015$30-11-2015:30-11-2015,93,1714.635
-100,2591,2ROM size,29-11-2015,1AA100$2BB100,MAC7$MAC8$MAC9,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,01-12-2015$01-12-2015:01-12-2015,2591,1271
-1000,2531,2ROM size,29-11-2015,1AA1000$2BB1000,MAC10$$MAC12,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,02-12-2015$02-12-2015:02-12-2015,2531,692
-10000,2408,0ROM size,29-11-2015,1AA10000$2BB10000,MAC13$$MAC15,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,03-12-2015$03-12-2015:03-12-2015,2408,2175
-100000,1815,0ROM size,29-11-2015,1AA100000$2BB100000,MAC16$$MAC18,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,04-12-2015$04-12-2015:04-12-2015,1815,136
-1000000,2479,4ROM size,29-11-2015,1AA1000000$2BB1000000,MAC19$$MAC21,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,05-12-2015$05-12-2015:05-12-2015,2479,1600
-100001,1845,7ROM size,29-11-2015,1AA100001$,MAC22$$MAC24,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,06-12-2015$06-12-2015:06-12-2015,1845,505
-100002,2008,1ROM size,29-11-2015,1AA100002$,MAC25$$MAC27,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,07-12-2015$07-12-2015:07-12-2015,2008,1341
-100003,1121,5ROM size,29-11-2015,1AA100003$,MAC28$$MAC30,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,08-12-2015$08-12-2015:08-12-2015,1121,2239
-100004,1511,8ROM size,29-11-2015,1AA100004$,MAC31$$MAC33,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,09-12-2015$09-12-2015:09-12-2015,1511,2970
-100005,2759,0ROM size,29-11-2015,1AA100005$,MAC34$$MAC36,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,10-12-2015$10-12-2015:10-12-2015,2759,2593
-100006,2069,7ROM size,29-11-2015,1AA100006$,MAC37$$MAC39,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,11-12-2015$11-12-2015:11-12-2015,2069,2572
-100007,396,7ROM size,29-11-2015,1AA100007$,MAC40$$MAC42,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,12-12-2015$12-12-2015:12-12-2015,396,1991
-100008,104,2ROM size,29-11-2015,1AA100008$,MAC43$$MAC45,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,13-12-2015$13-12-2015:13-12-2015,104,1442
-100009,477,3ROM size,29-11-2015,1AA100009$,MAC46$$MAC48,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,14-12-2015$14-12-2015:14-12-2015,477,1841
-10001,546,8ROM size,29-11-2015,1AA10001$2,MAC49$$MAC51,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,15-12-2015$15-12-2015:15-12-2015,546,298
-100010,2696,3ROM size,29-11-2015,1AA100010$2BB100010,MAC52$$MAC54,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,16-12-2015$16-12-2015:16-12-2015,2696,79
-100011,466,2ROM size,29-11-2015,1AA100011$2BB100011,MAC55$$MAC57,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,17-12-2015$17-12-2015:17-12-2015,466,202
-100012,2644,2ROM size,29-11-2015,1AA100012$2BB100012,MAC58$$MAC60,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,18-12-2015$18-12-2015:18-12-2015,2644,568
-100013,2167,3ROM size,29-11-2015,1AA100013$2BB100013,MAC61$MAC62,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,19-12-2015$19-12-2015:19-12-2015,2167,355
-100014,1069,7ROM size,29-11-2015,1AA100014$2BB100014,MAC64$MAC65,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,20-12-2015$20-12-2015:20-12-2015,1069,151
-100015,1447,9ROM size,29-11-2015,1AA100015$2BB100015,MAC67$MAC68,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,21-12-2015$21-12-2015:21-12-2015,1447,2863
-100016,2963,3ROM size,29-11-2015,1AA100016$2BB100016,MAC70$MAC71,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,22-12-2015$22-12-2015:22-12-2015,2963,1873
-100017,1580,5ROM size,29-11-2015,1AA100017$2BB100017,MAC73$MAC74,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,23-12-2015$23-12-2015:23-12-2015,1580,2205
-100018,446,2ROM size,29-11-2015,1AA100018$2BB100018,MAC76$MAC77,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,24-12-2015$24-12-2015:24-12-2015,446,441
-100019,2151,7ROM size,29-11-2015,1AA100019$2BB100019,MAC79$MAC80,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,25-12-2015$25-12-2015:25-12-2015,2151,2194
-10002,2201,1ROM size,29-11-2015,2BB10002,MAC82$MAC83,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,26-12-2015$26-12-2015:26-12-2015,2201,2972
-100020,2574,5ROM size,29-11-2015,$2BB100020,MAC85$MAC86,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,27-12-2015$27-12-2015:27-12-2015,2574,256
-100021,1734,4ROM size,29-11-2015,$2BB100021,MAC88$MAC89,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,28-12-2015$28-12-2015:28-12-2015,1734,1778
-100022,155,3ROM size,29-11-2015,$2BB100022,MAC91$MAC92,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,29-12-2015$29-12-2015:29-12-2015,155,1999
-100023,1386,8ROM size,29-11-2015,$2BB100023,MAC94$MAC95,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,30-12-2015$30-12-2015:30-12-2015,1386,2194
-100024,1017,9ROM size,29-11-2015,$2BB100024,MAC97$MAC98,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,,1017,2483
-100025,47,2ROM size,29-11-2015,$2BB100025,$MAC101$MAC102,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,,47,1724
-100026,2930,7ROM size,29-11-2015,$2BB100026,$MAC104$MAC105,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,,2930,1768
-100027,2940,0ROM size,29-11-2015,$2BB100027,$MAC107$MAC108,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,,2940,2436
-100028,297,5ROM size,29-11-2015,$2BB100028,$MAC110$MAC111,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,,297,2849
-100029,1695,2ROM size,29-11-2015,$2BB100029,$MAC113$MAC114,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,,1695,1691
-10003,1326,7ROM size,29-11-2015,2BB10003,$MAC116$MAC117,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,,1326,2071
-100030,513,7ROM size,29-11-2015,$2BB100030,$MAC119$MAC120,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,07-01-2016$07-01-2016:,513,1333
-100031,1741,1ROM size,29-11-2015,$2BB100031,$MAC122$MAC123,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,08-01-2016$08-01-2016:,1741,1080
-100032,1198,0ROM size,29-11-2015,$2BB100032,$MAC125$MAC126,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,09-01-2016$09-01-2016:,1198,1053
-100033,273,9ROM size,29-11-2015,$2BB100033,$MAC128$MAC129,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,10-01-2016$10-01-2016:,273,760
-100034,1234,6ROM size,29-11-2015,$2BB100034,$MAC131$MAC132,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,11-01-2016$11-01-2016:,1234,2061
-100035,1619,1ROM size,29-11-2015,$2BB100035,$MAC134$MAC135,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,12-01-2016$12-01-2016:,1619,2142
-100036,2415,2ROM size,29-11-2015,$2BB100036,$MAC137$MAC138,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,13-01-2016$13-01-2016:,2415,2224
-100037,2381,2ROM size,29-11-2015,$2BB100037,$MAC140$MAC141,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,14-01-2016$14-01-2016:,2381,1015
-100038,872,7ROM size,29-11-2015,1AA100038$2BB100038,$MAC143$MAC144,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,15-01-2016$15-01-2016,872,1229
-100039,1835,9ROM size,29-11-2015,1AA100039$2BB100039,$$MAC147,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,16-01-2016$16-01-2016,1835,1750
-10004,2597,1ROM size,29-11-2015,1AA10004$2BB10004,$$MAC150,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,17-01-2016$17-01-2016,2597,1717
-100040,1969,9ROM size,29-11-2015,1AA100040$2BB100040,$$MAC153,,18-01-2016$18-01-2016,1969,2078
-100041,2133,8ROM size,29-11-2015,$,$$MAC156,,19-01-2016$19-01-2016,2133,2734
-100042,631,9ROM size,29-11-2015,$,$$MAC159,,20-01-2016$20-01-2016,631,2745
-100043,187,4ROM size,29-11-2015,$,$$MAC162,2:Chinese::guangzhou:longhua:mingzhi$2:India::guangzhou:longhua:mingzhi,21-01-2016$21-01-2016,187,571
-100044,1232,5ROM size,29-11-2015,$,$$MAC165,2::Guangdong Province:guangzhou:longhua:mingzhi$2::Guangdong Province:guangzhou:longhua:mingzhi,22-01-2016$22-01-2016,1232,1697
-100045,1602,6ROM size,29-11-2015,$,$$MAC168,4:Chinese:Hunan Province::xiangtan:jianshelu$4:India:Hunan Province::xiangtan:jianshelu,23-01-2016$23-01-2016,1602,2553
-100046,2319,9ROM size,29-11-2015,$,$$MAC171,2:Chinese:Guangdong Province:guangzhou::mingzhi$2:India:Guangdong Province:guangzhou::mingzhi,24-01-2016$24-01-2016,2319,1077
-100047,839,4ROM size,29-11-2015,$,$$MAC174,5:Chinese:Hunan Province:zhuzhou:tianyuan:$5:India:Hunan Province:zhuzhou:tianyuan:,25-01-2016$25-01-2016,839,1823
-100048,1184,2ROM size,29-11-2015,$,$$MAC177,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,26-01-2016$:,1184,2399
-100049,2705,2ROM size,29-11-2015,$,$$MAC180,2:Chinese:Guangdong Province$2:India:Guangdong Province,27-01-2016$:,2705,2890
-10005,1185,1ROM size,29-11-2015,,$$MAC183,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,28-01-2016$:,1185,1608
-100050,2457,9ROM size,29-11-2015,,$$MAC186,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,29-01-2016$:,2457,29
-100051,2320,8ROM size,29-11-2015,,$$MAC189,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,30-01-2016$:,2320,1407
-100052,2300,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,31-01-2016$:,2300,845
-100053,1210,4ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,01-02-2016$:,1210,1655
-100054,1689,8ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,02-02-2016$:,1689,1368
-100055,2823,2ROM size,29-11-2015,,$$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$03-02-2016:03-02-2016,2823,1728
-100056,68,6ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,$04-02-2016:04-02-2016,68,750
-100057,716,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$05-02-2016:05-02-2016,716,2288
-100058,864,6ROM size,29-11-2015,,$$,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$06-02-2016:06-02-2016,864,2635
-100059,499,6ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,$07-02-2016:07-02-2016,499,1337
-10006,1429,3ROM size,29-11-2015,,$$,:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$:India:Guangdong Province:guangzhou:longhua:mingzhi,$08-02-2016:08-02-2016,1429,2478
-100060,2176,2ROM size,29-11-2015,,$$,:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$:India:Hunan Province:xiangtan:xiangtan:jianshelu,$09-02-2016:09-02-2016,2176,538
-100061,2563,7ROM size,29-11-2015,,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,$10-02-2016:10-02-2016,2563,1407
-100062,2594,3ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$11-02-2016:11-02-2016,2594,2952
-100063,2142,1ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$12-02-2016:12-02-2016,2142,1226
-100064,138,0ROM size,29-11-2015,1AA100064$2BB100064,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$13-02-2016:13-02-2016,138,865
-100065,1168,6ROM size,29-11-2015,1AA100065$2BB100065,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$14-02-2016:14-02-2016,1168,901
-100066,2828,5ROM size,29-11-2015,1AA100066$2BB100066,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$:,2828,1864
-100067,1160,0ROM size,29-11-2015,1AA100067$2BB100067,,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,$:,1160,572
-100068,1890,6ROM size,29-11-2015,1AA100068$2BB100068,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,1890,412
-100069,1195,4ROM size,29-11-2015,1AA100069$2BB100069,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$:,1195,1491
-10007,2797,9ROM size,29-11-2015,1AA10007$2BB10007,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$:,2797,1350
-100070,44,5ROM size,29-11-2015,1AA100070$2BB100070,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,44,1567
-100071,1683,6ROM size,29-11-2015,1AA100071$2BB100071,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,1683,1973
-100072,1085,1ROM size,29-11-2015,1AA100072$2BB100072,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,22-02-2016$22-02-2016:22-02-2016,1085,448
-100073,776,7ROM size,29-11-2015,1AA100073$2BB100073,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,23-02-2016$23-02-2016:23-02-2016,776,2488
-100074,2074,9ROM size,29-11-2015,1AA100074$2BB100074,MAC262$MAC263$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,24-02-2016$24-02-2016:24-02-2016,2074,907
-100075,1062,2ROM size,29-11-2015,1AA100075$2BB100075,MAC265$MAC266$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,25-02-2016$25-02-2016:25-02-2016,1062,2507
-100076,987,7ROM size,29-11-2015,1AA100076$2BB100076,MAC268$MAC269$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,26-02-2016$26-02-2016:26-02-2016,987,732
-100077,2799,9ROM size,29-11-2015,1AA100077$2BB100077,MAC271$MAC272$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,27-02-2016$27-02-2016:27-02-2016,2799,2077
-100078,2765,1ROM size,29-11-2015,1AA100078$2BB100078,MAC274$MAC275$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,28-02-2016$28-02-2016:28-02-2016,2765,1434
-100079,2164,1ROM size,29-11-2015,1AA100079$2BB100079,MAC277$MAC278$,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,29-02-2016$29-02-2016:29-02-2016,2164,1098
-10008,1624,6ROM size,29-11-2015,1AA10008$2BB10008,MAC280$MAC281$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,01-03-2016$01-03-2016:01-03-2016,1624,813
-100080,2355,1ROM size,29-11-2015,1AA100080$2BB100080,MAC283$MAC284$MAC285,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,02-03-2016$02-03-2016:02-03-2016,2355,954
-100081,1650,6ROM size,29-11-2015,1AA100081$2BB100081,MAC286$MAC287$MAC288,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,03-03-2016$03-03-2016:03-03-2016,1650,613
-100082,2761,3ROM size,29-11-2015,1AA100082$2BB100082,MAC289$MAC290$MAC291,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,04-03-2016$04-03-2016:04-03-2016,2761,2348
-100083,1856,3ROM size,29-11-2015,1AA100083$2BB100083,MAC292$MAC293$MAC294,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,05-03-2016$05-03-2016:05-03-2016,1856,2192
-100084,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826
-100085,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826
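
For reference, the deleted complexdata.csv encodes CarbonData complex types in plain CSV: '$' separates level-1 elements (array entries and top-level struct fields) and ':' separates level-2 fields. A hedged sketch of how such a file could be loaded through a CarbonContext; the table name, column types, struct field names, and file path are assumptions inferred from the header row and the data, not necessarily what the removed example used:

    // Assumed DDL for the deleted complexdata.csv; types inferred from the data.
    val cc = ExampleUtils.createCarbonContext("ComplexTypeSketch")
    cc.sql("""
      CREATE TABLE IF NOT EXISTS complex_sketch (
        deviceInformationId INT,
        channelsId STRING,
        ROMSize STRING,
        purchasedate STRING,
        mobile STRUCT<imei: STRING, imsi: STRING>,
        MAC ARRAY<STRING>,
        locationinfo ARRAY<STRUCT<id: INT, country: STRING, province: STRING,
                                  city: STRING, district: STRING, street: STRING>>,
        proddate STRUCT<productionDate: STRING, activeDeactivedate: ARRAY<STRING>>,
        gamePointId DOUBLE,
        contractNumber DOUBLE
      )
      STORED BY 'carbondata'
    """)
    // '$' is the level-1 complex delimiter and ':' the level-2 delimiter in this data;
    // the INPATH below is illustrative.
    cc.sql("""
      LOAD DATA LOCAL INPATH './src/main/resources/complexdata.csv'
      INTO TABLE complex_sketch
      OPTIONS('DELIMITER'=',', 'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')
    """)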

