![](/img/trans.png)
[英]How to JSON serialize message and post it to Kafka Topic with JSON Schema
[英]How to create consumer for kafka topic?
我現在已在 Kafka 服務器上創建了主題,並正在編寫消費者以從服務器讀取該主題的消息。但是當我嘗試使用 consumer.on('message') 消費消息時,看不到任何數據。以下代碼中是否有實現上的錯誤?我需要設置偏移量嗎?
consumer.js
var kafka = require('kafka-node');
var config = require('./config.js');
var zk = require('node-zookeeper-client');
var kafkaConn = config.kafkaCon.dit;
var HighLevelConsumer = kafka.HighLevelConsumer;
var Client = kafka.Client;
/**
 * Starts a kafka-node HighLevelConsumer for topic 'test-1' and logs the
 * topic name of every message received.
 *
 * Fixes over the original:
 *  - `topics` was an implicit global (no var/let); now declared locally.
 *  - A throwaway Client was created only to read its connectionString and
 *    was never closed; a single Client is enough.
 *  - No 'error' listener was registered, so a broker/ZooKeeper failure
 *    raised an unhandled 'error' event and crashed the process.
 */
function start() {
  var topics = [{
    topic: 'test-1'
  }];
  var groupId = 'push';
  // Random suffix keeps concurrently started consumers distinguishable.
  var clientId = "consumer-" + Math.floor(Math.random() * 10000);
  var options = {
    autoCommit: true,                 // commit offsets automatically
    fetchMaxWaitMs: 100,              // max ms broker may hold a fetch request
    fetchMaxBytes: 10 * 1024 * 1024,  // allow large batches (10 MiB)
    groupId: groupId
  };
  console.log("Started consumer: ", clientId);
  var client = new Client(kafkaConn, clientId);
  var consumer = new HighLevelConsumer(client, topics, options);
  consumer.on('message', function(message) {
    var topic = message.topic;
    console.log('Message', topic);
  });
  // Without this handler, any consumer error is an unhandled 'error'
  // event and terminates the Node process.
  consumer.on('error', function(err) {
    console.error('Consumer error:', err);
  });
}
start();
// node-rdkafka flowing-mode consumer. The commented-out entries show the
// extra settings needed for a SASL/SSL cluster such as CloudKarafka.
const Kafka = require("node-rdkafka");
const kafkaConf = {
  "group.id": "cloudkarafka-example",
  "metadata.broker.list": ["localhost:9092"],
  "socket.keepalive.enable": true,
  //"security.protocol": "SASL_SSL",
  //"sasl.mechanisms": "SCRAM-SHA-256",
  //"sasl.username": process.env.CLOUDKARAFKA_USERNAME,
  //"sasl.password": process.env.CLOUDKARAFKA_PASSWORD,
  "debug": "generic,broker,security",
  'enable.auto.commit': false,
};
//const prefix = process.env.CLOUDKARAFKA_USERNAME;
const topics = ['topicName'];
// Start from the earliest offset when the group has no committed offset yet.
const consumer = new Kafka.KafkaConsumer(kafkaConf, {
  "auto.offset.reset": "beginning"
});
consumer.on("error", function(err) {
  console.error(err);
});
consumer.on("ready", function(arg) {
  console.log(`Consumer ${arg.name} ready`);
  consumer.subscribe(topics);
  consumer.consume(); // flowing mode: emits one 'data' event per message
});
consumer.on("data", function(m) {
  // m.value is null for tombstone messages -- guard before decoding,
  // otherwise .toString() throws.
  if (m.value !== null) {
    console.log(m.value.toString());
  }
  // enable.auto.commit is false above, so offsets MUST be committed
  // manually; the original never committed, which made the group re-read
  // the whole topic on every restart.
  consumer.commitMessage(m);
});
consumer.on("disconnected", function(arg) {
  process.exit();
});
consumer.on('event.error', function(err) {
  console.error(err);
  process.exit(1);
});
consumer.on('event.log', function(log) {
  console.log(log);
});
consumer.connect();
可以使用 kafka-node npm 模塊編寫 Kafka Consumer。就我的用例而言,我的消費者是一個單獨的 Express 服務器,它偵聽事件並將其存儲在數據庫中。
import kafka from "kafka-node"
// Connect through ZooKeeper (kafka-node <4 style Client).
const client = new kafka.Client("http://localhost:2181");
const topics = [
  {
    topic: "webevents.dev"
  }
];
const options = {
  autoCommit: true,           // commit offsets automatically after fetch
  fetchMaxWaitMs: 1000,
  fetchMaxBytes: 1024 * 1024,
  encoding: "buffer"          // deliver message.value as a raw Buffer
};
const consumer = new kafka.HighLevelConsumer(client, topics, options);
consumer.on("message", function(message) {
  // Buffer.from replaces the deprecated, unsafe `new Buffer(...)`
  // constructor (removed-in-practice since Node 10).
  const buf = Buffer.from(message.value, "binary");
  let decodedMessage;
  try {
    decodedMessage = JSON.parse(buf.toString());
  } catch (err) {
    // A malformed payload used to throw out of the event handler and
    // crash the process; log it and skip the message instead.
    console.log("error", err);
    return;
  }
  //Events is a Sequelize Model Object.
  return Events.create({
    id: decodedMessage.id,
    type: decodedMessage.type,
    userId: decodedMessage.userId,
    sessionId: decodedMessage.sessionId,
    data: JSON.stringify(decodedMessage.data),
    createdAt: new Date()
  });
});
consumer.on("error", function(err) {
  console.log("error", err);
});
// Graceful shutdown on Ctrl-C: force-commit offsets, then exit.
process.on("SIGINT", function() {
  consumer.close(true, function() {
    process.exit();
  });
});
更多信息請參見https://nodewebapps.com/2017/11/04/getting-started-with-nodejs-and-kafka/
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.