I am using the MQTT protocol to push messages, and my broker is EMQX. I wrote this script in Node.js to push 200,000 messages to a specific topic:
const mqtt = require('mqtt');

const options = {
  clientId: 'tazikpush',
  clean: true,
};

const client = mqtt.connect("mqtt://xxxxxxxxxx", options);

const topic = '/ApplyData/';
const pushOptions = {
  retain: false,
  qos: 2,
};

// BUG FIX: `message` was never defined, so every client.publish() call threw
// a ReferenceError. Define an explicit payload (adjust to your real data).
const message = JSON.stringify({ ts: Date.now() });

// Promise-based sleep helper used to pace the publish loop.
const snooze = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Publishes 200,000 QoS 2 messages, lightly throttled. An unthrottled loop
// overruns the broker's in-flight QoS 2 window (zone.*.max_awaiting_rel),
// which is exactly what produces the "Dropped qos2 packet ... for too many
// awaiting_rel" warnings in the EMQX log.
const example = async () => {
  console.log('Waiting 5 sec and then start');
  await snooze(5000);
  for (let i = 0; i < 200000; i++) {
    // Small delay keeps the number of un-acknowledged QoS 2 packets bounded.
    await snooze(5);
    client.publish(topic, message, pushOptions);
    console.log(`done! ${i}`);
  }
};

// BUG FIX: start publishing only once the connection is actually up; the
// original called example() immediately, racing the TCP/MQTT handshake.
client.on('connect', example);
Using Node.js I also wrote a subscriber that listens on this topic and then stores the data into a Redis database. But I have two problems:
Why does the listener stall until the push client has published all 200,000 messages?
Why does the subscriber receive only 100 messages, while the other messages are dropped?
For my subscriber I created a JS file, mqttClient.js. In this file I create a client and subscribe to my topic with QoS 2:
// mqttClient.js — creates a single shared MQTT client, subscribes to the
// application topic at QoS 2, and exports the client for other modules.
const mqtt = require('mqtt');
const log4js = require('log4js');
const config = require('config');

const topic_sub = "/ApplyData/";

log4js.configure(JSON.parse(JSON.stringify(config.get('Logger'))));
const logger = log4js.getLogger('app');
logger.level = 'debug';

// NOTE(review): clean: true discards the session on every reconnect, so QoS 2
// messages queued while this subscriber is offline are lost. Consider
// clean: false (with this stable clientId) if offline delivery matters.
const options = {
  clientId: "mqttjs01",
  clean: true,
};

const client = mqtt.connect("mqtt://xxxxxxx", options);

client.on("connect", () => {
  console.log("connected " + client.connected);
  // BUG FIX: the subscribe result was ignored; log failures and the granted
  // QoS so a broker-side downgrade or rejection is visible.
  client.subscribe(topic_sub, { qos: 2 }, (err, granted) => {
    if (err) {
      logger.debug(`Subscribe Error : `, err);
    } else {
      logger.debug(`Subscribed : `, granted);
    }
  });
});

client.on("error", (error) => {
  // BUG FIX: missing space between the text and the appended error value.
  console.log("Can't connect " + error);
  logger.debug(`Client Error : `, error);
});

module.exports = client;
And I use the client's events in my controller. My subscriber is actually a worker, meaning its only job is to receive messages and store them in the database.
In App.js I require:
const client = require('./mqttClient');
const controller = require('./controller/mainController');
and in the main controller I handle incoming messages by calling client.on:
// Message handler on the shared MQTT client: parses each JSON payload,
// tags it with a fresh uuid, and writes it to the Redis cache.
// NOTE(review): the snippet as posted is truncated — the closing `}` of the
// if-block and the `});` of the handler are missing after the catch.
// NOTE(review): JSON.parse is not wrapped in try/catch here, so a malformed
// payload would reject the async handler — confirm upstream payloads.
client.on('message', async (topic, message, packet) => {
console.log(topic);
if (topic === '/ApplyData/') {
var jobject = JSON.parse(message);
jobject.nid = uuid()
try {
let res = await redis_cache.cache(jobject);
} catch (err) {
console.log(err);
}
I debugged the broker by running it from the console; after pushing the 200,000 messages I got:
2019-09-25 11:41:50.885 [warning] tazikpush [Session] Dropped qos2 packet 36998 for too many awaiting_rel
2019-09-25 11:41:50.900 [warning] tazikpush [Session] Dropped qos2 packet 36999 for too many awaiting_rel
2019-09-25 11:41:50.900 [warning] tazikpush [Session] Dropped qos2 packet 37000 for too many awaiting_rel
2019-09-25 11:41:50.900 [warning] tazikpush [Session] Dropped qos2 packet 37001 for too many awaiting_rel
2019-09-25 11:41:50.900 [warning] tazikpush [Session] Dropped qos2 packet 37002 for too many awaiting_rel....
2019-09-25 11:49:57.544 [warning] tazikpush [Session] Dropped qos2 packet 40292 for too many awaiting_rel
2019-09-25 11:49:57.544 [warning] tazikpush [Session] The PUBREL PacketId 36898 is not found
2019-09-25 11:49:57.544 [warning] tazikpush [Session] The PUBREL PacketId 36899 is not found
2019-09-25 11:49:57.544 [warning] tazikpush [Session] The PUBREL PacketId 36900 is not found
2019-09-25 11:49:57.544 [warning] tazikpush [Session] The PUBREL PacketId 36901 is not found ...
(end of broker log excerpt)
Your publisher is too fast, and a single client is too slow to handle the consumption.
This may be related to these settings in your config:
etc/emqx.conf
## Maximum QoS2 packets (Client -> Broker) awaiting PUBREL, 0 means no limit.
##
## Value: Number
zone.external.max_awaiting_rel = 100
## The QoS2 messages (Client -> Broker) will be dropped if awaiting PUBREL timeout.
##
## Value: Duration
zone.external.await_rel_timeout = 300s
The best way is to use EMQ X Enterprise, or to use shared subscriptions to add more consumer clients:
https://docs.emqx.io/tutorial/v3/en/advanced/share_subscribe.html