I have a simple program that should retrieve the HTML from a website URL.
main.cpp
#include "Downloader.h"
#include <QCoreApplication>
int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    auto dl = new Downloader(&a);
    QString url = "https://www.dognow.at/ergebnisse/?page=1";
    dl->fetch(url);

    return a.exec();
}
Downloader.h
#ifndef DOWNLOADER_H
#define DOWNLOADER_H

#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QObject>

class Downloader : public QObject
{
    Q_OBJECT
public:
    explicit Downloader(QObject* parent = nullptr);
    // Take the URL by const reference: fetch() does not modify it,
    // and this also allows passing temporaries.
    void fetch(const QString& url);

private:
    QNetworkAccessManager* m_manager;

private slots:
    void replyFinished(QNetworkReply* rep);
};

#endif // DOWNLOADER_H
Downloader.cpp
#include "Downloader.h"
#include <QDebug>
Downloader::Downloader(QObject* parent): QObject(parent),
m_manager(new QNetworkAccessManager(parent))
{}
void Downloader::fetch(QString& url)
{
qDebug() << "fetch " << url;
connect(m_manager, &QNetworkAccessManager::finished, this, &Downloader::replyFinished);
m_manager->get(QNetworkRequest(QUrl(url)));
}
void Downloader::replyFinished(QNetworkReply* rep)
{
QByteArray data=rep->readAll();
QString str(data);
qDebug() << "data len: " << str.length();
rep->close();
}
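Side note, with hindsight from the update below: even with the error check in replyFinished(), the underlying TLS failure can be logged as it happens by listening to QNetworkAccessManager::sslErrors. A minimal sketch of a connection one could add in the Downloader constructor (this is my addition, not part of the original program; it needs #include <QSslError>):

connect(m_manager, &QNetworkAccessManager::sslErrors,
        this, [](QNetworkReply* rep, const QList<QSslError>& errors) {
            // A failed handshake aborts the request, so finished() fires
            // with no data; log the underlying cause here instead.
            for (const QSslError& e : errors)
                qDebug() << "SSL error on" << rep->url() << ":" << e.errorString();
        });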
When I run the program on my local PC, it works fine. When I run it on another machine, the reply data is empty. Both systems run Linux (x86_64) with Qt 5.15.0.
I hope someone can give me a hint as to where I should look.
UPDATE 2022-04-04, 16:22: when I run a simple curl command on the failing machine, it works fine.
OK, I found the problem. The failing machine runs an older Ubuntu (16.04 LTS) with an incompatible OpenSSL version. I found this out by copying my own (debug) build of the Qt libraries to the other machine, which then reported an SSL error (incompatible version). After installing a newer OpenSSL version, it now works.
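For anyone who hits the same symptom: Qt can report at runtime which OpenSSL it was built against and which library it actually loaded, which makes this kind of mismatch visible immediately. A minimal diagnostic sketch using QSslSocket's static helpers (the wrapper function is just for illustration):

#include <QDebug>
#include <QSslSocket>

// supportsSsl() returns false when Qt cannot load a compatible OpenSSL;
// the two version strings show the build-time vs. run-time libraries.
void printSslInfo()
{
    qDebug() << "SSL supported:" << QSslSocket::supportsSsl();
    qDebug() << "built against:" << QSslSocket::sslLibraryBuildVersionString();
    qDebug() << "loaded at runtime:" << QSslSocket::sslLibraryVersionString();
}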