Skip to content

Commit 4ece47e

Browse files
committed
last cleanups
1 parent f83727d commit 4ece47e

File tree

5 files changed

+12
-28
lines changed

5 files changed

+12
-28
lines changed

kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ package org.jetbrains.kotlinx.spark.api
3030
import org.apache.spark.api.java.Optional
3131
import scala.*
3232
import scala.collection.JavaConverters
33-
import scala.reflect.ClassTag
3433
import java.util.*
3534
import java.util.Enumeration
3635
import java.util.concurrent.ConcurrentMap

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt renamed to kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ package org.jetbrains.kotlinx.spark.api
2323

2424
/**
2525
* Source: https://github.com/kotest/kotest-extensions-embedded-kafka
26-
*
2726
*/
2827

2928
import io.github.embeddedkafka.EmbeddedKafka
@@ -40,7 +39,7 @@ import org.apache.kafka.common.serialization.StringDeserializer
4039
import org.apache.kafka.common.serialization.StringSerializer
4140
import org.apache.kafka.common.utils.Bytes
4241
import scala.Predef
43-
import java.util.Properties
42+
import java.util.*
4443

4544
val embeddedKafkaListener: EmbeddedKafkaListener = EmbeddedKafkaListener(EmbeddedKafkaConfig.defaultConfig())
4645

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ object Kafka : Tag()
3838

3939
class KafkaStreamingTest : ShouldSpec({
4040

41-
// making sure it can be skipped on github actions since it times out
41+
// making sure it can be skipped on GitHub Actions since it times out
4242
tags(Kafka)
4343

4444
context("kafka") {

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,4 +25,6 @@ import io.kotest.extensions.allure.AllureTestReporter
2525
@Suppress("unused")
2626
object ProjectConfig : AbstractProjectConfig() {
2727
override fun listeners() = super.listeners() + AllureTestReporter(true)
28+
29+
override fun extensions() = super.extensions() + AllureTestReporter(true)
2830
}

kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt

Lines changed: 8 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -19,45 +19,29 @@
1919
*/
2020
package org.jetbrains.kotlinx.spark.api
2121

22-
import io.kotest.assertions.print.print
2322
import io.kotest.assertions.throwables.shouldThrow
24-
import io.kotest.assertions.timing.eventually
25-
import io.kotest.core.extensions.install
2623
import io.kotest.core.spec.style.ShouldSpec
27-
import io.kotest.extensions.testcontainers.TestContainerExtension
28-
import io.kotest.extensions.testcontainers.kafka.createStringStringConsumer
29-
import io.kotest.extensions.testcontainers.kafka.createStringStringProducer
30-
import io.kotest.extensions.testcontainers.perTest
3124
import io.kotest.matchers.collections.shouldBeIn
3225
import io.kotest.matchers.collections.shouldContainAll
33-
import io.kotest.matchers.collections.shouldHaveSize
3426
import io.kotest.matchers.shouldBe
3527
import org.apache.commons.io.FileUtils
3628
import org.apache.hadoop.fs.FileSystem
37-
import org.apache.kafka.clients.consumer.ConsumerConfig
38-
import org.apache.kafka.clients.consumer.ConsumerRecord
39-
import org.apache.kafka.clients.producer.ProducerConfig
40-
import org.apache.kafka.clients.producer.ProducerRecord
41-
import org.apache.kafka.common.serialization.StringDeserializer
4229
import org.apache.spark.SparkException
43-
import org.apache.spark.streaming.*
44-
import org.apache.spark.streaming.api.java.JavaDStream
45-
import org.apache.spark.streaming.api.java.JavaInputDStream
46-
import org.apache.spark.streaming.kafka010.ConsumerStrategies
47-
import org.apache.spark.streaming.kafka010.KafkaUtils
48-
import org.apache.spark.streaming.kafka010.LocationStrategies
30+
import org.apache.spark.streaming.Checkpoint
31+
import org.apache.spark.streaming.Duration
32+
import org.apache.spark.streaming.Durations
33+
import org.apache.spark.streaming.Time
4934
import org.apache.spark.util.Utils
50-
import org.jetbrains.kotlinx.spark.api.tuples.*
51-
import org.testcontainers.containers.KafkaContainer
52-
import org.testcontainers.utility.DockerImageName
35+
import org.jetbrains.kotlinx.spark.api.tuples.X
36+
import org.jetbrains.kotlinx.spark.api.tuples.component1
37+
import org.jetbrains.kotlinx.spark.api.tuples.component2
38+
import org.jetbrains.kotlinx.spark.api.tuples.t
5339
import scala.Tuple2
5440
import java.io.File
5541
import java.io.Serializable
5642
import java.nio.charset.StandardCharsets
5743
import java.util.*
5844
import java.util.concurrent.atomic.AtomicBoolean
59-
import kotlin.time.Duration.Companion.seconds
60-
import java.time.Duration
6145

6246

6347
class StreamingTest : ShouldSpec({

0 commit comments

Comments
 (0)