Flink with Confluent Kafka Schema Registry

rjee0c15 asked on 2021-06-07 in Kafka

I am trying to write from Flink to Confluent Kafka with the Schema Registry, using FlinkKafkaProducer010, and I get the error below. I created a custom serialization schema, see the ConfluentAvroSerializationSchema class. The code compiles but fails at runtime. A complete code sample that reproduces the error is available at https://github.com/dmiljkovic/test-flink-schema-registry. Everything (the Kafka cluster and the Schema Registry) is mocked; the code is actually a test case.

import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.flink.streaming.util.serialization.SerializationSchema;

public class ConfluentAvroSerializationSchema<T> implements SerializationSchema<T> {

    private static final long serialVersionUID = 1L;

    private final String topic;
    private final KafkaAvroSerializer avroSerializer;

    public ConfluentAvroSerializationSchema(String topic, KafkaAvroSerializer avroSerializer) {
        this.topic = topic;
        this.avroSerializer = avroSerializer;
    }

    @Override
    public byte[] serialize(T obj) {
        // Delegate to Confluent's serializer, which registers/looks up the schema in the registry.
        return avroSerializer.serialize(topic, obj);
    }
}

    // create the Avro serializer backed by the (mock) schema registry
    KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer(schemaRegistry);
    avroSerializer.configure(new HashMap<>(registryProp), false);
    ConfluentAvroSerializationSchema<TestRecordAvro> ser =
            new ConfluentAvroSerializationSchema<>(topic, avroSerializer);

    // write to Kafka
    FlinkKafkaProducer010.writeToKafkaWithTimestamps(ds, topic, ser, flinkProp);

org.apache.flink.api.common.InvalidProgramException: Object org.apache.flink.streaming.util.serialization.KeyedSerializationSchemaWrapper@6e28bb87 is not serializable

at org.apache.flink.api.java.ClosureCleaner.ensureSerializable(ClosureCleaner.java:109)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducerBase.<init>(FlinkKafkaProducerBase.java:145)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer09.<init>(FlinkKafkaProducer09.java:130)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010.<init>(FlinkKafkaProducer010.java:227)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010.writeToKafkaWithTimestamps(FlinkKafkaProducer010.java:137)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010.writeToKafkaWithTimestamps(FlinkKafkaProducer010.java:115)
at com.acme.kafka_avro_util.TestProducer.testAvroConsumer(TestProducer.java:59)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:55)
at org.junit.rules.RunRules.evaluate(RunRules.java:20)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:48)
at org.junit.rules.RunRules.evaluate(RunRules.java:20)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70)
Caused by: java.io.NotSerializableException: io.confluent.kafka.serializers.KafkaAvroSerializer
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1184)
at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548)
at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509)
at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432)
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178)
at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348)
at org.apache.flink.util.InstantiationUtil.serializeObject(InstantiationUtil.java:315)
at org.apache.flink.api.java.ClosureCleaner.ensureSerializable(ClosureCleaner.java:107)

ujv3wf0j1#

Flink has to serialize all operators (including your sink) in order to ship them to the task managers.
The problem is that the KafkaAvroSerializer you are using in your ConfluentAvroSerializationSchema is not serializable at all, which makes your sink non-serializable. You can initialize the KafkaAvroSerializer lazily: for example, pass the schema registry properties to ConfluentAvroSerializationSchema instead of a KafkaAvroSerializer, and create the KafkaAvroSerializer on the first call to serialize().
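A minimal sketch of that lazy-initialization idea, assuming the registry settings (e.g. schema.registry.url) are passed in as a serializable Map<String, String>; the field names are illustrative, and the mock SchemaRegistryClient used in the linked test project would need a different injection path:

import java.util.Map;

import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.flink.streaming.util.serialization.SerializationSchema;

public class ConfluentAvroSerializationSchema<T> implements SerializationSchema<T> {

    private static final long serialVersionUID = 1L;

    private final String topic;
    // Serializable registry settings (e.g. "schema.registry.url") instead of the serializer itself.
    private final Map<String, String> registryConfig;
    // Built lazily on the task manager, never shipped with the job graph.
    private transient KafkaAvroSerializer avroSerializer;

    public ConfluentAvroSerializationSchema(String topic, Map<String, String> registryConfig) {
        this.topic = topic;
        this.registryConfig = registryConfig;
    }

    @Override
    public byte[] serialize(T obj) {
        if (avroSerializer == null) {
            avroSerializer = new KafkaAvroSerializer();
            avroSerializer.configure(registryConfig, false); // false = this is a value serializer
        }
        return avroSerializer.serialize(topic, obj);
    }
}

Only the topic string and the config map are captured by the sink's closure, so Flink's ClosureCleaner check passes, and each task builds its own KafkaAvroSerializer on the first call to serialize().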


hsgswve42#

Change private final KafkaAvroSerializer avroSerializer; to private transient KafkaAvroSerializer avroSerializer; and then initialize it in a setup method. Something like:

@Setup
public void start() {
    this.avroSerializer = //initialization
}
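With either approach the call site no longer hands a live KafkaAvroSerializer to the schema. A hypothetical usage based on the property-passing constructor sketched under the first answer (topic, ds and flinkProp are the same objects as in the question; the registry URL is just a placeholder):

Map<String, String> registryConfig = new HashMap<>();
registryConfig.put("schema.registry.url", "http://localhost:8081"); // placeholder URL

// Only serializable values (a topic string and a plain HashMap) end up in the sink's closure.
ConfluentAvroSerializationSchema<TestRecordAvro> ser =
        new ConfluentAvroSerializationSchema<>(topic, registryConfig);

// write to Kafka as before
FlinkKafkaProducer010.writeToKafkaWithTimestamps(ds, topic, ser, flinkProp);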
