I am trying to get a solution working in which I use the libvlc library to grab video frames from an h264 stream. I registered a callback via libvlc_video_set_format_callbacks
and received the following information from my callback function (format_callback
) parameters: chroma: "J420", width: 1088, height: 1922
When I call the following in main:
Player p;
p.play("rtsp://path/to/camera?videocodec=h264");
it prints out the following errors:
chroma "J420" width: 1088 , height: 1922
[00007fddfc001268] core vout display error: Failed to change zoom
[00007fddfc001268] core vout display error: Failed to set on top
[00007fddfc001268] core vout display error: Failed to change source AR
[h264 @ 0x7fde1c06cea0] error while decoding MB 24 111, bytestream -15
[swscaler @ 0x7fddfc002ca0] bad dst image pointers
[swscaler @ 0x7fddfc002ca0] bad dst image pointers
My guess is that there is a problem with the buffers and their sizes. Where, of which type, and how big should the buffers be to receive the video frame by frame? Later I plan to forward the frame data to a QImage. Below is the Player.h file:
const int ResoHeight = 1922;
const int ResoWidth = 1088;
const int BytesPerPixel = 3; // not sure about this
struct ImageData
{
QVector<unsigned char> raw;
QVector<unsigned char> picture;
ImageData()
{
raw.resize(BytesPerPixel * ResoHeight * ResoWidth);
picture.resize(BytesPerPixel * ResoHeight * ResoWidth);
}
};
// Minimal libvlc-backed video player that decodes a stream into an in-memory
// buffer (see ImageData) via libvlc's raw video callbacks.
class Player : public QObject
{
Q_OBJECT
public:
explicit Player(QObject *parent = nullptr);
~Player();
// Starts asynchronous playback of the given media URL (e.g. "rtsp://...").
void play(const std::string& path);
signals:
// Intended to announce each decoded frame; emission is not wired up yet
// (see unlock_frame in Player.cpp).
void newImage(const QImage& image);
private:
libvlc_instance_t* vlcInstance; // owning; released in the destructor
libvlc_media_player_t* player;  // owning; released in the destructor
libvlc_media_t* media;          // released right after set_media in play()
ImageData buffer;               // frame storage shared with the vlc callbacks
};
Player.cpp is as follows:
namespace {
void* lock_frame(void *opaque, void **planes)
{
ImageData* buf = (ImageData*)(opaque);
*planes = buf->raw.data();
return buf->picture.data();
}
void unlock_frame(void *opaque, void *picture, void *const *planes)
{
// will be logic to announce new image
}
unsigned format_callback(void** opaque, char* chroma, unsigned *width, unsigned *height, unsigned *pitches, unsigned *lines)
{
qDebug() << "chroma:" << QString(chroma) << "width:" << *width << ", height:" << *height;
*pitches= (*width) * BytesPerPixel;
*lines= *height;
return 1;
}
} // namespace
// Sets up the libvlc machinery: one library instance and one media player
// bound to it.  The media itself is created later, in play().
Player::Player(QObject* parent)
    : QObject(parent)
    , vlcInstance(nullptr)
    , player(nullptr)
    , media(nullptr)
{
    vlcInstance = libvlc_new(0, nullptr);
    player = libvlc_media_player_new(vlcInstance);
}
// Tear down in reverse order of construction: stop playback first, then drop
// the player, and release the library instance last (the player still needs
// the instance while being released).
Player::~Player()
{
libvlc_media_player_stop(player);
libvlc_media_player_release(player);
libvlc_release(vlcInstance);
}
void Player::play(const std::string& path)
{
media = libvlc_media_new_location(vlcInstance, path.c_str());
libvlc_media_player_set_media(player, media);
libvlc_media_release(media);
libvlc_video_set_callbacks(player, lock_frame, unlock_frame, nullptr, &buffer);
libvlc_video_set_format_callbacks(player, format_callback, nullptr);
libvlc_media_player_play(player);
}
J420 is a planar YUV chroma format, which means you have to provide three pitches
and lines
entries (one per plane) in format_callback
, and three separate plane
pointers (one for each plane) in the lock_frame
function. If you just want an RGB (RV24) image instead, see this question.