This document describes how to access CKafka to send/receive messages with the SDK for Node.js in a VPC.
Upload the nodejskafkademo directory in the downloaded demo to the Linux server, then go to the nodejskafkademo directory.
Run the following command to switch to the yum source configuration directory /etc/yum.repos.d/.
cd /etc/yum.repos.d/
Create the yum source configuration file confluent.repo.
[Confluent.dist]
name=Confluent repository (dist)
baseurl=https://packages.confluent.io/rpm/5.1/7
gpgcheck=1
gpgkey=https://packages.confluent.io/rpm/5.1/archive.key
enabled=1
[Confluent]
name=Confluent repository
baseurl=https://packages.confluent.io/rpm/5.1
gpgcheck=1
gpgkey=https://packages.confluent.io/rpm/5.1/archive.key
enabled=1
Run the following command to install the librdkafka C/C++ dependency library.
yum install librdkafka-devel
Run the following command to specify the OpenSSL header file path for the preprocessor.
export CPPFLAGS=-I/usr/local/opt/openssl/include
Run the following command to specify the OpenSSL library path for the connector.
export LDFLAGS=-L/usr/local/opt/openssl/lib
Run the following command to install the node-rdkafka dependency library for Node.js.
npm install --unsafe-perm node-rdkafka
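Optionally, you can confirm that node-rdkafka was built against librdkafka before moving on. The short sketch below (verify-install.js is a hypothetical file name) prints the same Kafka.features and Kafka.librdkafkaVersion properties that the demo programs later in this document print.
// verify-install.js (hypothetical file name): check that the native binding loads
const Kafka = require('node-rdkafka');
// Prints the compiled-in features (e.g., ssl, sasl) and the librdkafka version
console.log('features: ' + Kafka.features);
console.log('librdkafka version: ' + Kafka.librdkafkaVersion);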
Create the CKafka configuration file setting.js.
module.exports = {
    'bootstrap_servers': ["xxx.ckafka.tencentcloudmq.com:6018"],
    'topic_name': 'xxx',
    'group_id': 'xxx'
}
| Parameter | Description |
|---|---|
| bootstrap_servers | Access network address, which can be copied from the Network column in the Access Mode section on the Instance Details page in the console. |
| topic_name | Topic name, which can be copied from the Topic Management page in the console. |
| group_id | Consumer group name, which you can customize. After the demo runs successfully, you can see the consumer on the Consumer Group page. |
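For reference, a filled-in setting.js might look like the sketch below. Every value here is an illustrative placeholder and must be replaced with the values copied from your own console.
module.exports = {
    // Access address copied from the Network column in the Access Mode section (placeholder)
    'bootstrap_servers': ["xxx.ckafka.tencentcloudmq.com:6018"],
    // Topic name copied from the Topic Management page (placeholder)
    'topic_name': 'test-topic',
    // Custom consumer group name (placeholder)
    'group_id': 'test-group'
}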
Write the message production program producer.js.
const Kafka = require('node-rdkafka');
const config = require('./setting');
console.log("features:" + Kafka.features);
console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
    'api.version.request': 'true',
    // Bootstrap servers. Get the corresponding access address in the console
    'bootstrap.servers': config['bootstrap_servers'],
    'dr_cb': true,
    'dr_msg_cb': true,
    // Number of retries upon request error. We recommend you set the value to be greater than 0,
    // so that retries can ensure as much as possible that the message will not be lost
    'retries': '0',
    // Interval between a failed request and the next retry
    'retry.backoff.ms': 100,
    // Timeout period of the producer network request
    'socket.timeout.ms': 6000,
});

var connected = false;

producer.setPollInterval(100);
producer.connect();

producer.on('ready', function() {
    connected = true;
    console.log("connect ok");
});

producer.on('disconnected', function() {
    connected = false;
    producer.connect();
});

producer.on('event.log', function(event) {
    console.log("event.log", event);
});

producer.on('error', function(error) {
    console.log("error:" + error);
});

function produce() {
    try {
        producer.produce(
            // Topic to produce to
            config['topic_name'],
            // Partition; null lets librdkafka choose one
            null,
            // Message payload (Buffer.from replaces the deprecated new Buffer())
            Buffer.from('Hello CKafka Default'),
            // Message key (optional)
            null,
            // Timestamp
            Date.now()
        );
    } catch (err) {
        console.error('Error occurred when sending message(s)');
        console.error(err);
    }
}

producer.on('delivery-report', function(err, report) {
    console.log("delivery-report: producer ok");
});

producer.on('event.error', function(err) {
    console.error('event.error:' + err);
});

// Produce one message per second
setInterval(produce, 1000);
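The produce() call above passes null for both the partition and the key. If you want messages that share a key to land in the same partition, you can pass a key explicitly; the following is a minimal sketch using the same producer object and config, with an illustrative key value.
// Sketch: produce with an explicit key so that records sharing a key go to the same partition
producer.produce(
    config['topic_name'],                    // topic
    null,                                    // partition: null lets librdkafka pick one based on the key
    Buffer.from('Hello CKafka with key'),    // message value
    'order-1001',                            // message key (illustrative)
    Date.now()                               // timestamp
);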
Run the following command to send messages.
node producer.js
View the operation result.
On the Topic Management page in the CKafka console, select the corresponding topic and click More > Message Query to view the message you just sent.
Create the message consumer program consumer.js.
const Kafka = require('node-rdkafka');
const config = require('./setting');
console.log(Kafka.features);
console.log(Kafka.librdkafkaVersion);
console.log(config);

var consumer = new Kafka.KafkaConsumer({
    'api.version.request': 'true',
    // Bootstrap servers. Get the corresponding access address in the console
    'bootstrap.servers': config['bootstrap_servers'],
    'group.id': config['group_id'],
    // Consumer session timeout when the Kafka consumer group mechanism is used. If the broker does not receive
    // the consumer's heartbeat within this period, the consumer is considered failed and the broker initiates a rebalance
    'session.timeout.ms': 10000,
    // Client request timeout period. If no response is received within this period, the request times out and fails
    'metadata.request.timeout.ms': 305000,
    // Maximum internal reconnection backoff interval of the client
    'reconnect.backoff.max.ms': 3000
});

consumer.connect();

consumer.on('ready', function() {
    console.log("connect ok");
    consumer.subscribe([config['topic_name']]);
    consumer.consume();
});

consumer.on('data', function(data) {
    console.log(data);
});

consumer.on('event.log', function(event) {
    console.log("event.log", event);
});

consumer.on('error', function(error) {
    console.log("error:" + error);
});

consumer.on('event', function(event) {
    console.log("event:" + event);
});
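Each data event delivers one message object whose value field is a Buffer and which also carries the topic, partition, and offset. If you prefer to log just the decoded payload instead of the whole object, a handler such as the sketch below could be used in place of the data handler above.
// Sketch: log only the decoded payload and where it came from
consumer.on('data', function(data) {
    console.log(
        'received: ' + data.value.toString() +
        ' (topic=' + data.topic +
        ', partition=' + data.partition +
        ', offset=' + data.offset + ')'
    );
});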
Run the following command to consume messages.
node consumer.js
View the operation result.
On the Consumer Group page in the CKafka console, select the corresponding consumer group, enter the topic name in Topic Name, and click Query Details to view the consumption details.