hudi-commits mailing list archives

From GitBox <...@apache.org>
Subject [GitHub] [incubator-hudi] bhasudha commented on a change in pull request #1306: [HUDI-598] Update quick start page
Date Tue, 04 Feb 2020 22:22:34 GMT
bhasudha commented on a change in pull request #1306: [HUDI-598] Update quick start page
URL: https://github.com/apache/incubator-hudi/pull/1306#discussion_r374955592
 
 

 ##########
 File path: docs/_docs/1_1_quick_start_guide.md
 ##########
 @@ -176,28 +176,28 @@ Delete records for the HoodieKeys passed in.
 
 ```scala
 // fetch total records count
-spark.sql("select uuid, partitionPath from hudi_ro_table").count()
+spark.sql("select uuid, partitionPath from hudi_trips_snapshot").count()
 // fetch two records to be deleted
-val ds = spark.sql("select uuid, partitionPath from hudi_ro_table").limit(2)
+val ds = spark.sql("select uuid, partitionPath from hudi_trips_snapshot").limit(2)
 
 // issue deletes
 val deletes = dataGen.generateDeletes(ds.collectAsList())
 val df = spark.read.json(spark.sparkContext.parallelize(deletes, 2));
-df.write.format("org.apache.hudi").
-options(getQuickstartWriteConfigs).
-option(OPERATION_OPT_KEY,"delete").
-option(PRECOMBINE_FIELD_OPT_KEY, "ts").
-option(RECORDKEY_FIELD_OPT_KEY, "uuid").
-option(PARTITIONPATH_FIELD_OPT_KEY, "partitionpath").
-option(TABLE_NAME, tableName).
-mode(Append).
-save(basePath);
+df.write.format("hudi").
+  options(getQuickstartWriteConfigs).
+  option(OPERATION_OPT_KEY,"delete").
+  option(PRECOMBINE_FIELD_OPT_KEY, "ts").
+  option(RECORDKEY_FIELD_OPT_KEY, "uuid").
+  option(PARTITIONPATH_FIELD_OPT_KEY, "partitionpath").
+  option(TABLE_NAME, tableName).
+  mode(Append).
+  save(basePath)
 
 // run the same read query as above.
 val roAfterDeleteViewDF = spark.
-    read.
-    format("org.apache.hudi").
-    load(basePath + "/*/*/*/*")
+  read.
+  format("hudi").
+  load(basePath + "/*/*/*/*")
 roAfterDeleteViewDF.registerTempTable("hudi_ro_table")
 // fetch should return (total - 2) records
 spark.sql("select uuid, partitionPath from hudi_ro_table").count()
 
 Review comment:
  hudi_ro_table -> hudi_trips_snapshot here as well (in the registerTempTable call and the following query), to match the renamed queries above.
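
  For context, a minimal sketch of what those closing lines would look like with the suggested rename applied (assuming the same spark session, basePath, and snapshot-read pattern as in the diff above; hudi_trips_snapshot is the view name proposed in this review):

  ```scala
  // re-read the table after the deletes and register the temp view under the
  // suggested name (hudi_trips_snapshot instead of hudi_ro_table)
  val roAfterDeleteViewDF = spark.
    read.
    format("hudi").
    load(basePath + "/*/*/*/*")
  roAfterDeleteViewDF.registerTempTable("hudi_trips_snapshot")
  // fetch should return (total - 2) records
  spark.sql("select uuid, partitionPath from hudi_trips_snapshot").count()
  ```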

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services
