Why does one Kafka stream block another from starting?

4xrmg8kj asked on 2021-06-07 in Kafka

I am using the new Kafka Scala Streams API that Lightbend recently open-sourced. I am trying to run two streams, but they do not run at the same time and I do not get the desired output.

package in.internity

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.lightbend.kafka.scala.streams.{KStreamS, StreamsBuilderS}
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.{StreamsConfig, _}
import org.json4s.DefaultFormats
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write

import scala.util.Try

/**
  * @author Shivansh <shiv4nsh@gmail.com>
  * @since 8/1/18
  */
object Boot extends App {
  implicit val formats: DefaultFormats.type = DefaultFormats
  val config: Properties = {
    val p = new Properties()
    p.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application")
    p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    p.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p
  }

  val streams1 = wordSplit("lines", "wordCount")
  val streams2 = readAndWriteJson("person", "personName")

  private def wordSplit(intopic: String, outTopic: String) = {
    val builder = new StreamsBuilderS()
    val produced = Produced.`with`(Serdes.String(), Serdes.String())
    val textLines: KStreamS[String, String] = builder.stream(intopic)
    val data: KStreamS[String, String] = textLines.flatMapValues(value => value.toLowerCase.split("\\W+").toIterable)
    data.to(outTopic, produced)

    val streams: KafkaStreams = new KafkaStreams(builder.build(), config)
    streams
  }

  private def readAndWriteJson(intopic: String, outTopic: String) = {
    val builder = new StreamsBuilderS()
    val produced = Produced.`with`(Serdes.String(), Serdes.String())
    val textLines: KStreamS[String, String] = builder.stream(intopic)
    val data: KStreamS[String, String] = textLines.mapValues(value => {
      val person = Try(parse(value).extract[Person]).toOption
      println("1::", person)
      val personNameAndEmail = person.map(a => PersonNameAndEmail(a.name, a.email))
      println("2::", personNameAndEmail)
      write(personNameAndEmail)
    })
    data.to(outTopic, produced)

    val streams: KafkaStreams = new KafkaStreams(builder.build(), config)
    streams
  }

  streams1.start()
  streams2.start()
  Runtime.getRuntime.addShutdownHook(new Thread(() => {
    streams2.close(10, TimeUnit.SECONDS)
    streams1.close(10, TimeUnit.SECONDS)
  }))
}

case class Person(name: String, age: Int, email: String)

case class PersonNameAndEmail(name: String, email: String)

When I run this and produce messages to the topic person, they are not consumed. But when I swap the order in which the streams are started

streams2.start()
streams1.start()

it works fine. Why does starting one stream block the other? Can't we run multiple streams at the same time?
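Running several Kafka Streams instances side by side is possible in general; the usual caveat is that each instance needs its own application.id, otherwise both join the same consumer group even though their topologies subscribe to different topics, and partitions can end up assigned to an instance whose topology does not read that topic. Below is a minimal sketch along those lines, not taken from the original post; the helper configFor and the application ids word-split-app and person-copy-app are hypothetical names for illustration.

import java.util.Properties

import com.lightbend.kafka.scala.streams.{KStreamS, StreamsBuilderS}
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

object TwoInstancesSketch extends App {
  // Hypothetical helper: each instance gets its own application.id,
  // so the two instances join separate consumer groups.
  def configFor(appId: String): Properties = {
    val p = new Properties()
    p.put(StreamsConfig.APPLICATION_ID_CONFIG, appId)
    p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    p.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p
  }

  val produced = Produced.`with`(Serdes.String(), Serdes.String())

  // First topology: split lines into words, as in the question.
  val builder1 = new StreamsBuilderS()
  val lines: KStreamS[String, String] = builder1.stream("lines")
  lines.flatMapValues(_.toLowerCase.split("\\W+").toIterable).to("wordCount", produced)

  // Second topology: copy the "person" topic, kept trivial for the sketch.
  val builder2 = new StreamsBuilderS()
  val persons: KStreamS[String, String] = builder2.stream("person")
  persons.to("personName", produced)

  // Distinct application.id per instance, so both can run concurrently.
  val streams1 = new KafkaStreams(builder1.build(), configFor("word-split-app"))
  val streams2 = new KafkaStreams(builder2.build(), configFor("person-copy-app"))
  streams1.start()
  streams2.start()

  Runtime.getRuntime.addShutdownHook(new Thread(() => {
    streams1.close()
    streams2.close()
  }))
}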

jjhzyzn0

Got it working. It seems I was initializing the stream twice, in each of the two methods (silly me :P). The fix is to build both topologies on a single StreamsBuilderS and run them with one KafkaStreams instance.
Working code:

package in.internity

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.lightbend.kafka.scala.streams.{KStreamS, StreamsBuilderS}
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.{StreamsConfig, _}
import org.json4s.DefaultFormats
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write

import scala.util.Try

/**
  * @author Shivansh <shiv4nsh@gmail.com>
  * @since 8/1/18
  */
object Boot extends App {
  implicit val formats: DefaultFormats.type = DefaultFormats
  val config: Properties = {
    val p = new Properties()
    p.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application")
    p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    p.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
    p
  }

  val builder = new StreamsBuilderS()

  private def wordSplit(intopic: String, outTopic: String) = {
    val produced = Produced.`with`(Serdes.String(), Serdes.String())
    val textLines: KStreamS[String, String] = builder.stream(intopic)
    val data: KStreamS[String, String] = textLines.flatMapValues(value => value.toLowerCase.split("\\W+").toIterable)
    data.to(outTopic, produced)
  }

  private def readAndWriteJson(intopic: String, outTopic: String) = {
    val produced = Produced.`with`(Serdes.String(), Serdes.String())
    val textLines: KStreamS[String, String] = builder.stream(intopic)
    val data: KStreamS[String, String] = textLines.mapValues(value => {
      val person = Try(parse(value).extract[Person]).toOption
      println("1::", person)
      val personNameAndEmail = person.map(a => PersonNameAndEmail(a.name, a.email))
      println("2::", personNameAndEmail)
      write(personNameAndEmail)
    })
    data.to(outTopic, produced)
  }

  wordSplit("lines", "wordCount")
  readAndWriteJson("person", "personName")
  val streams: KafkaStreams = new KafkaStreams(builder.build(), config)
  streams.start()

  Runtime.getRuntime.addShutdownHook(new Thread(() => {
    streams.close(10, TimeUnit.SECONDS)
  }))
}

case class Person(name: String, age: Int, email: String)
case class PersonNameAndEmail(name: String, email: String)
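For reference, here is a small standalone sketch, not part of the original answer, of what the mapValues step in readAndWriteJson does to a single message, so the expected records on the person and personName topics are easier to see. The sample JSON input and the object name MapValuesSketch are made up for illustration; the case classes mirror the ones defined above.

import org.json4s.DefaultFormats
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write

import scala.util.Try

case class Person(name: String, age: Int, email: String)
case class PersonNameAndEmail(name: String, email: String)

object MapValuesSketch extends App {
  implicit val formats: DefaultFormats.type = DefaultFormats

  // Example of a message as it might appear on the "person" topic (sample data).
  val incoming = """{"name":"Alice","age":30,"email":"alice@example.com"}"""

  // Same logic as the mapValues body in readAndWriteJson.
  val person = Try(parse(incoming).extract[Person]).toOption
  val outgoing = write(person.map(p => PersonNameAndEmail(p.name, p.email)))

  // Prints the reduced record (name and email only),
  // i.e. what would be written to the "personName" topic.
  println(outgoing)
}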
