Skip to content

Commit 5009355

Browse files
committed
Updated Kotlin Jupyter version, added streaming test for Jupyter as well
1 parent 10c3a90 commit 5009355

File tree

5 files changed

+87
-19
lines changed

5 files changed

+87
-19
lines changed

examples/pom-3.2_2.12.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -105,8 +105,8 @@
105105
<artifactId>maven-compiler-plugin</artifactId>
106106
<version>${maven-compiler-plugin.version}</version>
107107
<configuration>
108-
<source>8</source>
109-
<target>8</target>
108+
<source>9</source>
109+
<target>9</target>
110110
</configuration>
111111
</plugin>
112112
</plugins>

kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,6 @@ abstract class Integration : JupyterIntegration() {
5858
"commons-io:commons-io:2.11.0",
5959
)
6060

61-
println("SparkIntegration loaded")
62-
6361
import(
6462
"org.jetbrains.kotlinx.spark.api.*",
6563
"org.jetbrains.kotlinx.spark.api.tuples.*",

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt

Lines changed: 84 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -19,34 +19,31 @@
1919
*/
2020
package org.jetbrains.kotlinx.spark.api
2121

22+
import io.kotest.assertions.throwables.shouldThrowAny
2223
import io.kotest.core.spec.style.ShouldSpec
24+
import io.kotest.matchers.collections.shouldBeIn
2325
import io.kotest.matchers.nulls.shouldNotBeNull
26+
import io.kotest.matchers.shouldBe
2427
import io.kotest.matchers.shouldNotBe
2528
import io.kotest.matchers.string.shouldContain
2629
import io.kotest.matchers.types.shouldBeInstanceOf
2730
import jupyter.kotlin.DependsOn
28-
import kotlinx.serialization.decodeFromString
29-
import kotlinx.serialization.encodeToString
30-
import kotlinx.serialization.json.Json
31-
import org.apache.spark.SparkConf
3231
import org.apache.spark.api.java.JavaSparkContext
32+
import org.apache.spark.streaming.Duration
3333
import org.intellij.lang.annotations.Language
3434
import org.jetbrains.kotlinx.jupyter.EvalRequestData
3535
import org.jetbrains.kotlinx.jupyter.ReplForJupyter
3636
import org.jetbrains.kotlinx.jupyter.ReplForJupyterImpl
3737
import org.jetbrains.kotlinx.jupyter.api.Code
38-
import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost
3938
import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult
40-
import org.jetbrains.kotlinx.jupyter.api.libraries.*
41-
import org.jetbrains.kotlinx.jupyter.dependencies.ResolverConfig
4239
import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider
43-
import org.jetbrains.kotlinx.jupyter.libraries.LibrariesScanner
44-
import org.jetbrains.kotlinx.jupyter.libraries.LibraryResolver
45-
import org.jetbrains.kotlinx.jupyter.libraries.buildDependenciesInitCode
4640
import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx
4741
import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider
48-
import org.jetbrains.kotlinx.jupyter.util.NameAcceptanceRule
4942
import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule
43+
import org.jetbrains.kotlinx.spark.api.tuples.X
44+
import org.jetbrains.kotlinx.spark.api.tuples.component1
45+
import org.jetbrains.kotlinx.spark.api.tuples.component2
46+
import java.util.*
5047
import kotlin.script.experimental.jvm.util.classpathFromClassloader
5148

5249
class JupyterTests : ShouldSpec({
@@ -235,6 +232,82 @@ class JupyterTests : ShouldSpec({
235232
}
236233
})
237234

235+
class JupyterStreamingTests : ShouldSpec({
236+
val replProvider = ReplProvider { classpath ->
237+
ReplForJupyterImpl(
238+
resolutionInfoProvider = EmptyResolutionInfoProvider,
239+
scriptClasspath = classpath,
240+
isEmbedded = true,
241+
).apply {
242+
eval {
243+
librariesScanner.addLibrariesFromClassLoader(
244+
classLoader = currentClassLoader,
245+
host = this,
246+
integrationTypeNameRules = listOf(
247+
PatternNameAcceptanceRule(false, "org.jetbrains.kotlinx.spark.api.jupyter.**"),
248+
PatternNameAcceptanceRule(true,
249+
"org.jetbrains.kotlinx.spark.api.jupyter.SparkStreamingIntegration"),
250+
),
251+
)
252+
}
253+
}
254+
}
255+
256+
val currentClassLoader = DependsOn::class.java.classLoader
257+
val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty()
258+
259+
fun createRepl(): ReplForJupyter = replProvider(scriptClasspath)
260+
suspend fun withRepl(action: suspend ReplForJupyter.() -> Unit): Unit = createRepl().action()
261+
262+
context("Jupyter") {
263+
withRepl {
264+
265+
should("Not have spark instance") {
266+
shouldThrowAny {
267+
@Language("kts")
268+
val spark = exec("""spark""")
269+
Unit
270+
}
271+
}
272+
273+
should("Not have sc instance") {
274+
shouldThrowAny {
275+
@Language("kts")
276+
val sc = exec("""sc""")
277+
Unit
278+
}
279+
}
280+
281+
should("stream") {
282+
val input = listOf("aaa", "bbb", "aaa", "ccc")
283+
val counter = Counter(0)
284+
285+
withSparkStreaming(Duration(10), timeout = 1000) {
286+
287+
val (counterBroadcast, queue) = withSpark(ssc) {
288+
spark.broadcast(counter) X LinkedList(listOf(sc.parallelize(input)))
289+
}
290+
291+
val inputStream = ssc.queueStream(queue)
292+
293+
inputStream.foreachRDD { rdd, _ ->
294+
withSpark(rdd) {
295+
rdd.toDS().forEach {
296+
it shouldBeIn input
297+
counterBroadcast.value.value++
298+
}
299+
}
300+
}
301+
}
302+
303+
counter.value shouldBe input.size
304+
}
305+
306+
}
307+
}
308+
})
309+
310+
238311
private fun ReplForJupyter.execEx(code: Code): EvalResultEx = evalEx(EvalRequestData(code))
239312

240313
private fun ReplForJupyter.exec(code: Code): Any? = execEx(code).renderedValue

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,6 @@ class StreamingTest : ShouldSpec({
4949
context("streaming") {
5050

5151
should("stream") {
52-
5352
val input = listOf("aaa", "bbb", "aaa", "ccc")
5453
val counter = Counter(0)
5554

@@ -72,7 +71,6 @@ class StreamingTest : ShouldSpec({
7271
}
7372

7473
counter.value shouldBe input.size
75-
7674
}
7775

7876
should("Work with checkpointpath") {

pom.xml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,7 @@
1717
<kotest-extensions-allure.version>1.1.0</kotest-extensions-allure.version>
1818
<embedded-kafka.version>3.1.0</embedded-kafka.version>
1919
<spark3.version>3.2.1</spark3.version>
20-
<!-- <kotlin-jupyter-api.version>0.11.0-79</kotlin-jupyter-api.version>-->
21-
<kotlin-jupyter-api.version>0.11.0-100500-1</kotlin-jupyter-api.version>
20+
<kotlin-jupyter-api.version>0.11.0-83</kotlin-jupyter-api.version>
2221
<kotlinx.html.version>0.7.3</kotlinx.html.version>
2322
<hadoop.version>3.3.1</hadoop.version>
2423
<!-- <junit.version>5.8.2</junit.version>-->

0 commit comments

Comments
 (0)