Skip to content

Commit b65c3d5

Browse files
authored
Merge branch 'main' into spark-3.3.2
2 parents e48464e + e52a517 commit b65c3d5

File tree

4 files changed

+20
-17
lines changed

4 files changed

+20
-17
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,7 @@ To it, simply add
119119
to the top of your notebook. This will get the latest version of the API, together with the latest version of Spark.
120120
To specify a particular version of Spark or of the API itself, simply add it like this:
121121
```jupyterpython
122-
%use spark(spark=3.3.1, scala=2.13, v=1.2.2)
122+
%use spark(spark=3.3.1, scala=2.13, v=1.2.3)
123123
```
124124

125125
Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value.

buildSrc/src/main/kotlin/Versions.kt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,16 @@
11
object Versions {
2-
const val project = "1.2.3"
2+
const val project = "1.2.4-SNAPSHOT"
33
const val groupID = "org.jetbrains.kotlinx.spark"
44
const val kotlin = "1.8.0"
55
const val jvmTarget = "8"
6-
const val jupyterJvmTarget = "11"
6+
const val jupyterJvmTarget = "8"
77

88
inline val spark get() = System.getProperty("spark") as String
99
inline val scala get() = System.getProperty("scala") as String
1010
inline val sparkMinor get() = spark.substringBeforeLast('.')
1111
inline val scalaCompat get() = scala.substringBeforeLast('.')
1212

13-
const val jupyter = "0.11.0-210"
13+
const val jupyter = "0.12.0-32-1"
1414
const val kotest = "5.5.4"
1515
const val kotestTestContainers = "1.3.3"
1616
const val dokka = "1.7.10"

core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -82,15 +82,15 @@ object CatalystTypeConverters {
8282
final def toCatalyst(@Nullable maybeScalaValue: Any): CatalystType = {
8383
if (maybeScalaValue == null) {
8484
null.asInstanceOf[CatalystType]
85-
} else if (maybeScalaValue.isInstanceOf[Option[ScalaInputType]]) {
86-
val opt = maybeScalaValue.asInstanceOf[Option[ScalaInputType]]
87-
if (opt.isDefined) {
88-
toCatalystImpl(opt.get)
89-
} else {
90-
null.asInstanceOf[CatalystType]
91-
}
92-
} else {
93-
toCatalystImpl(maybeScalaValue.asInstanceOf[ScalaInputType])
85+
} else maybeScalaValue match {
86+
case opt: Option[ScalaInputType] =>
87+
if (opt.isDefined) {
88+
toCatalystImpl(opt.get)
89+
} else {
90+
null.asInstanceOf[CatalystType]
91+
}
92+
case _ =>
93+
toCatalystImpl(maybeScalaValue.asInstanceOf[ScalaInputType])
9494
}
9595
}
9696

@@ -429,10 +429,11 @@ object CatalystTypeConverters {
429429
// a measurable performance impact. Note that this optimization will be unnecessary if we
430430
// use code generation to construct Scala Row -> Catalyst Row converters.
431431
def convert(maybeScalaValue: Any): Any = {
432-
if (maybeScalaValue.isInstanceOf[Option[Any]]) {
433-
maybeScalaValue.asInstanceOf[Option[Any]].orNull
434-
} else {
435-
maybeScalaValue
432+
maybeScalaValue match {
433+
case option: Option[Any] =>
434+
option.orNull
435+
case _ =>
436+
maybeScalaValue
436437
}
437438
}
438439

jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ class JupyterTests : ShouldSpec({
5959
librariesScanner.addLibrariesFromClassLoader(
6060
classLoader = currentClassLoader,
6161
host = this,
62+
notebook = notebook,
6263
integrationTypeNameRules = listOf(
6364
PatternNameAcceptanceRule(
6465
acceptsFlag = false,
@@ -341,6 +342,7 @@ class JupyterStreamingTests : ShouldSpec({
341342
librariesScanner.addLibrariesFromClassLoader(
342343
classLoader = currentClassLoader,
343344
host = this,
345+
notebook = notebook,
344346
integrationTypeNameRules = listOf(
345347
PatternNameAcceptanceRule(
346348
acceptsFlag = false,

0 commit comments

Comments
 (0)