/*-
 * =LICENSE=
 * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12)
 * ----------
 * Copyright (C) 2019 - 2022 JetBrains
 * ----------
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =LICENSEEND=
 */
package org.jetbrains.kotlinx.spark.examples.streaming

import org.apache.kafka.clients.consumer.ConsumerConfig.*
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.Durations
import org.apache.spark.streaming.api.java.JavaDStream
import org.apache.spark.streaming.api.java.JavaInputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies
import org.jetbrains.kotlinx.spark.api.reduceByKey
import org.jetbrains.kotlinx.spark.api.tuples.*
import org.jetbrains.kotlinx.spark.api.withSparkStreaming
import scala.Tuple2
import java.io.Serializable
import java.util.regex.Pattern
import kotlin.system.exitProcess

/**
 * Src: https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/streaming/JavaDirectKafkaWordCount.java
 *
 * Consumes messages from one or more Kafka topics and performs a streaming word count.
 * Usage: KotlinDirectKafkaWordCount <brokers> <groupId> <topics>
 *   <brokers> is a list of one or more Kafka brokers
 *   <groupId> is a consumer group name to consume from topics
 *   <topics> is a list of one or more Kafka topics to consume from
 *
 * Example:
 *
 * First make sure you have a Kafka producer running. For instance, when running locally:
 * $ kafka-console-producer.sh --topic quickstart-events --bootstrap-server localhost:9092
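 *
 * If the topic does not exist yet, it can be created first (assuming a local
 * Kafka quickstart installation with the scripts on your PATH):
 * $ kafka-topics.sh --create --topic quickstart-events --bootstrap-server localhost:9092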
 *
 * Then start the program normally or like this:
 * $ bin/run-example streaming.KotlinDirectKafkaWordCount broker1-host:port,broker2-host:port \
 *     consumer-group topic1,topic2
 */
object KotlinDirectKafkaWordCount {

    private val SPACE = Pattern.compile(" ")

    private const val DEFAULT_BROKER = "localhost:9092"
    private const val DEFAULT_GROUP_ID = "consumer-group"
    private const val DEFAULT_TOPIC = "quickstart-events"

    @JvmStatic
    fun main(args: Array<String>) {
        if (args.size < 3 && args.isNotEmpty()) {
            System.err.println(
                """
                |Usage: KotlinDirectKafkaWordCount <brokers> <groupId> <topics>
                |  <brokers> is a list of one or more Kafka brokers
                |  <groupId> is a consumer group name to consume from topics
                |  <topics> is a list of one or more Kafka topics to consume from
                """.trimMargin()
            )
            exitProcess(1)
        }

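        // With no arguments at all, the example falls back to a local broker,
        // a default consumer group, and the Kafka quickstart topic defined above.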
        val brokers: String = args.getOrElse(0) { DEFAULT_BROKER }
        val groupId: String = args.getOrElse(1) { DEFAULT_GROUP_ID }
        val topics: String = args.getOrElse(2) { DEFAULT_TOPIC }

        // Create a streaming context with a 2-second batch interval
        withSparkStreaming(batchDuration = Durations.seconds(2), appName = "KotlinDirectKafkaWordCount") {

            val topicsSet: Set<String> = topics.split(',').toSet()

            val kafkaParams: Map<String, Serializable> = mapOf(
                BOOTSTRAP_SERVERS_CONFIG to brokers,
                GROUP_ID_CONFIG to groupId,
                KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java,
                VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java,
            )
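            // Note: AUTO_OFFSET_RESET_CONFIG ("auto.offset.reset") is left at its
            // default of "latest", so only messages produced after the stream starts
            // are counted; setting it to "earliest" would replay the topic from the start.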

            // Create a direct Kafka stream with the given brokers and topics
            val messages: JavaInputDStream<ConsumerRecord<String, String>> = KafkaUtils.createDirectStream(
                ssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.Subscribe(topicsSet, kafkaParams),
            )
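            // PreferConsistent distributes the Kafka partitions evenly across the
            // available executors; Subscribe reads from the fixed set of topics above.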

            // Get the lines, split them into words, count the words, and print
            val lines: JavaDStream<String> = messages.map { it.value() }
            val words: JavaDStream<String> = lines.flatMap { it.split(SPACE).iterator() }

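            // Pair every word with a count of 1 (the tuples API's infix `X` builds a
            // scala.Tuple2), then sum the counts per word within each batch.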
            val wordCounts: JavaDStream<Tuple2<String, Int>> = words
                .map { it X 1 }
                .reduceByKey { a: Int, b: Int -> a + b }

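            // print() outputs at most the first 10 elements of each batch; print(n)
            // shows more, and foreachRDD gives full access to the counts.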
            wordCounts.print()

        }
    }
}