spark-reviews mailing list archives

From GitBox <...@apache.org>
Subject [GitHub] [spark] HeartSaVioR commented on a change in pull request #26821: [SPARK-20656][CORE] Support Incremental parsing of event logs in SHS
Date Thu, 12 Dec 2019 09:22:10 GMT
HeartSaVioR commented on a change in pull request #26821: [SPARK-20656][CORE] Support Incremental parsing of event logs in SHS
URL: https://github.com/apache/spark/pull/26821#discussion_r357034401
 
 

 ##########
 File path: core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
 ##########
 @@ -190,6 +191,54 @@ class FsHistoryProviderSuite extends SparkFunSuite with Matchers with Logging {
     provider.mergeApplicationListingCall should be (1)
   }
 
+  test("support incremental parsing of the event logs") {
+    val provider = new FsHistoryProvider(createTestConf(true))
+
+    var store: InMemoryStore = null
+    val logFile1 = newLogFile("app1", None, inProgress = true)
+    writeFile(logFile1, None,
+      SparkListenerApplicationStart("app1", Some("app1"), 1L, "test", None),
+      SparkListenerJobStart(0, 2L, Seq())
+    )
+    updateAndCheck(provider) { list =>
+      list.size should be (1)
+      provider.getAttempt("app1", None).logPath should endWith(EventLogFileWriter.IN_PROGRESS)
+      val appUi = provider.getAppUI("app1", None)
+      appUi should not be null
+      store = appUi.get.ui.store.store.asInstanceOf[InMemoryStore]
+    }
+
+    writeFile(logFile1, None,
+      SparkListenerApplicationStart("app1", Some("app1"), 1L, "test", None),
+      SparkListenerJobStart(0, 2L, Seq()),
+      SparkListenerJobEnd(0, 3L, JobSucceeded)
+    )
+
+    updateAndCheck(provider) { list =>
+      store should not be null
 +      store.read(classOf[AppStatusListenerData], Array(Some("app1"), None)) should not be null
+      list.size should be (1)
+      provider.getAttempt("app1", None).logPath should endWith(EventLogFileWriter.IN_PROGRESS)
+      val appUi = provider.getAppUI("app1", None)
+      appUi should not be null
 
 Review comment:
   I think it should really check whether the store reflects the events correctly; it's not enough to just check whether it was loaded. That check should cover these cases: the initial read, new events appended to the same file, and a new file.
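   For illustration only, a minimal sketch of the kind of content check being suggested, assuming the standard `AppStatusStore.job(jobId)` accessor is reachable through `appUi.get.ui.store`; the concrete assertions would depend on what this PR's `AppStatusListenerData` actually persists:

    // Sketch only: after the second write appends SparkListenerJobEnd(0, 3L, JobSucceeded),
    // check that the incrementally updated store reflects the new event rather than
    // merely checking that a store instance was loaded.
    import org.apache.spark.JobExecutionStatus

    updateAndCheck(provider) { list =>
      list.size should be (1)
      val appUi = provider.getAppUI("app1", None)
      appUi should not be None
      val statusStore = appUi.get.ui.store
      val jobData = statusStore.job(0)            // AppStatusStore.job(jobId): v1.JobData
      jobData.status should be (JobExecutionStatus.SUCCEEDED)
      jobData.completionTime should not be None   // set only once the JobEnd event is parsed
    }

   A similar assertion on the initial read (job 0 started but not yet completed) and on a freshly written second log file would exercise all three cases.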

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

