How do I create a consumer for a Kafka topic?

ckx4rj1h asked on 2021-06-07 in Kafka

I have created a topic on my Kafka server, and now I'm writing a consumer to read messages from that topic. However, nothing happens when I listen with consumer.on('message'). Can anyone spot what's wrong in the code below? Do I need to set an offset?
consumer.js

var kafka = require('kafka-node');
var config = require('./config.js');
var kafkaConn = config.kafkaCon.dit;
var HighLevelConsumer = kafka.HighLevelConsumer;
var Client = kafka.Client;

function start() {
    var topics = [{
        topic: 'test-1'
    }];
    var groupId = 'push';
    var clientId = 'consumer-' + Math.floor(Math.random() * 10000);
    var options = {
        autoCommit: true,
        fetchMaxWaitMs: 100,
        fetchMaxBytes: 10 * 1024 * 1024,
        groupId: groupId
    };
    console.log('Started consumer: ', clientId);
    var client = new Client(kafkaConn, clientId);
    var consumer = new HighLevelConsumer(client, topics, options);
    consumer.on('message', function(message) {
        console.log('Message', message.topic);
    });
    // Surface connection/ZooKeeper failures instead of failing silently.
    consumer.on('error', function(err) {
        console.error('Consumer error', err);
    });
}

start();
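
On the offset question: a HighLevelConsumer tracks offsets per consumer group, so you normally do not set one yourself; a silent consumer is more often a connection problem that only an 'error' listener will reveal. If you do want to start from an explicit offset, here is a minimal sketch using kafka-node's plain Consumer (the partition, starting offset, and ZooKeeper address are assumptions):

var kafka = require('kafka-node');

var client = new kafka.Client('localhost:2181', 'offset-demo'); // assumed ZooKeeper address
var consumer = new kafka.Consumer(
    client,
    [{ topic: 'test-1', partition: 0, offset: 0 }], // start reading at offset 0
    { autoCommit: false, fromOffset: true }         // fromOffset: true honours the offset above
);

consumer.on('message', function(message) {
    console.log(message.offset, message.value);
});
consumer.on('error', function(err) {
    console.error(err);
});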
Answer 1 (c2e8gylq):

A Kafka consumer can be created with the kafka-node npm module. For my use case, my consumer was a standalone Express server that listened for events and stored them in a database.

import kafka from "kafka-node"

const client = new kafka.Client("localhost:2181"); // ZooKeeper connection string, not an http:// URL

const topics = [
    {
        topic: "webevents.dev"
    }
];
const options = {
    autoCommit: true,
    fetchMaxWaitMs: 1000,
    fetchMaxBytes: 1024 * 1024,
    encoding: "buffer"
};

const consumer = new kafka.HighLevelConsumer(client, topics, options);

consumer.on("message", function(message) {

    // message.value is a raw Buffer (encoding: "buffer" above); decode it to JSON.
    const buf = Buffer.from(message.value, "binary"); // Buffer.from replaces the deprecated new Buffer()
    const decodedMessage = JSON.parse(buf.toString());

    //Events is a Sequelize Model Object. 
    return Events.create({
        id: decodedMessage.id,
        type: decodedMessage.type,
        userId: decodedMessage.userId,
        sessionId: decodedMessage.sessionId,
        data: JSON.stringify(decodedMessage.data),
        createdAt: new Date()
    });
});

consumer.on("error", function(err) {
    console.log("error", err);
});

process.on("SIGINT", function() {
    consumer.close(true, function() {
        process.exit();
    });
});

More information: https://nodewebapps.com/2017/11/04/getting-started-with-nodejs-and-kafka/
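
To exercise this consumer end to end, here is a minimal producer sketch with the same kafka-node module; the topic name and event shape mirror the consumer above, and the ZooKeeper address is an assumption:

import kafka from "kafka-node"

const client = new kafka.Client("localhost:2181");
const producer = new kafka.Producer(client);

producer.on("ready", () => {
    // Field names mirror what the consumer above decodes and stores.
    const event = {
        id: 1,
        type: "pageview",
        userId: 42,
        sessionId: "abc-123",
        data: { path: "/" }
    };
    producer.send(
        [{ topic: "webevents.dev", messages: JSON.stringify(event) }],
        (err, result) => {
            if (err) console.error(err);
            else console.log("sent", result);
        }
    );
});

producer.on("error", (err) => console.error(err));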

Answer 2 (6g8kf2rb):
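
Another option is node-rdkafka, which wraps librdkafka; the SASL settings below are commented out for a local, unauthenticated broker: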

const Kafka = require("node-rdkafka");

const kafkaConf = {
  "group.id": "cloudkarafka-example",
  "metadata.broker.list": ["localhost:9092"],
  "socket.keepalive.enable": true,
  // Uncomment for a SASL_SSL-secured cluster (e.g. CloudKarafka):
  //"security.protocol": "SASL_SSL",
  //"sasl.mechanisms": "SCRAM-SHA-256",
  //"sasl.username": process.env.CLOUDKARAFKA_USERNAME,
  //"sasl.password": process.env.CLOUDKARAFKA_PASSWORD,
  "debug": "generic,broker,security",
  "enable.auto.commit": false, // offsets must then be committed manually (see the sketch below)
};

//const prefix = process.env.CLOUDKARAFKA_USERNAME;
const topics = ["topicName"];

// The second argument is the topic config: start from the earliest offset
// when the group has no committed offset yet.
const consumer = new Kafka.KafkaConsumer(kafkaConf, {
  "auto.offset.reset": "beginning"
});

consumer.on("error", function(err) {
  console.error(err);
});

// "ready" fires once the consumer is connected; subscribe and start consuming.
consumer.on("ready", function(arg) {
  console.log(`Consumer ${arg.name} ready`);
  consumer.subscribe(topics);
  consumer.consume();
});

// One "data" event is emitted per message.
consumer.on("data", function(m) {
  console.log(m.value.toString());
});

consumer.on("disconnected", function(arg) {
  process.exit();
});

consumer.on("event.error", function(err) {
  console.error(err);
  process.exit(1);
});

consumer.on("event.log", function(log) {
  console.log(log);
});

consumer.connect();
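
Since enable.auto.commit is false here, offsets are never committed unless you do it yourself, and the group restarts from the beginning every time. A minimal sketch of committing after each processed message (same broker and topic assumptions as above):

const Kafka = require("node-rdkafka");

const consumer = new Kafka.KafkaConsumer({
  "group.id": "cloudkarafka-example",
  "metadata.broker.list": "localhost:9092",
  "enable.auto.commit": false      // we commit explicitly below
}, { "auto.offset.reset": "beginning" });

consumer.on("ready", () => {
  consumer.subscribe(["topicName"]);
  consumer.consume();
});

consumer.on("data", (m) => {
  console.log(m.value.toString());
  consumer.commitMessage(m);       // commit this message's offset once it is processed
});

consumer.connect();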
