I am using GStreamer to display the FPS (framerate) of a playing video in a Linux terminal using fpsdisplaysink. Now I would like to display the FPS on the screen itself (via a Wayland client running against a Weston background). Can anyone help? Thanks.
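For context, the terminal readout comes from fpsdisplaysink, roughly like this (a simplified sketch, not my exact code; the property and signal names are the ones from gst-plugins-bad's fpsdisplaysink):
/* Sketch: wrap the real video sink in fpsdisplaysink and print its
   measurements to the terminal. */
static void
on_fps_measurements (GstElement *fpssink, gdouble fps, gdouble droprate,
                     gdouble avgfps, gpointer user_data)
{
  g_print ("fps: %.2f, dropped: %.2f, average: %.2f\n", fps, droprate, avgfps);
}

/* ... inside main(), instead of using waylandsink directly ... */
GstElement *fpssink = gst_element_factory_make ("fpsdisplaysink", "fps-sink");
GstElement *wsink = gst_element_factory_make ("waylandsink", "video-output");
g_object_set (G_OBJECT (fpssink),
              "video-sink", wsink,
              "text-overlay", FALSE,           /* no on-video text, terminal only */
              "signal-fps-measurements", TRUE, /* emit "fps-measurements" */
              NULL);
g_signal_connect (fpssink, "fps-measurements",
                  G_CALLBACK (on_fps_measurements), NULL);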
EDIT: Below is the GStreamer source code, written in C.
#include <gst/gst.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <time.h>
#include <wayland-client.h>
#include <wayland-egl.h>
#include <wayland-client-protocol.h>
#include <cairo.h>
#include "helpers.h"
#define INPUT_FILE "/home/root/videos/vga1.h264"
#define POSITION_X 100
#define POSITION_Y 100
#define DELAY_VALUE 1000000
static const unsigned WIDTH = 320;
static const unsigned HEIGHT = 200;
static const unsigned CURSOR_WIDTH = 100;
static const unsigned CURSOR_HEIGHT = 59;
static const int32_t CURSOR_HOT_SPOT_X = 10;
static const int32_t CURSOR_HOT_SPOT_Y = 35;
static char prv_time_str[25] = {0,};
static volatile int fps_counter = 0;
static char str_fps[32] = "";
static time_t timer;
static char time_str[25];
static struct tm* tm_info;
int ignored_first = 0;
static GstElement *overlay;
static GstPadProbeReturn
cb_have_data (GstPad *pad,
GstPadProbeInfo *info,
gpointer user_data)
{
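/* Buffer probe: runs once for every buffer that reaches the sink pad.
   Count buffers per wall-clock second and, when the second changes,
   push "FPS: N" into the textoverlay (the first partial second is skipped). */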
time(&timer);
tm_info = localtime(&timer);
strftime(time_str, 25, "%Y:%m:%d%H:%M:%S\n", tm_info);
fps_counter++;
if (!strlen(prv_time_str))
strcpy(prv_time_str, time_str);
if (strcmp(prv_time_str, time_str)) {
if (ignored_first) {
snprintf(str_fps, sizeof(str_fps), "FPS: %d", fps_counter);
g_object_set (G_OBJECT (overlay), "text", str_fps, NULL);
g_print("fps: %d\n", fps_counter);
}
ignored_first = 1;
fps_counter = 0;
}
strcpy(prv_time_str, time_str);
return GST_PAD_PROBE_OK;
}
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *parser, *decoder, *sink;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gchar *fps_msg;
guint delay_show_FPS = 0;
const gchar *input_file = INPUT_FILE;
/* Initialization */
gst_init (&argc, &argv);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("video-play");
source = gst_element_factory_make ("filesrc", "file-source");
parser = gst_element_factory_make ("h264parse", "h264-parser");
decoder = gst_element_factory_make ("omxh264dec", "h264-decoder");
sink = gst_element_factory_make ("waylandsink", "video-output");
overlay = gst_element_factory_make ("textoverlay", "overlay");
if (!pipeline || !source || !parser || !decoder || !sink || !overlay ) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set input video file for source element */
g_object_set (G_OBJECT (source), "location", input_file, NULL);
/* Set position for displaying (100, 100) */
g_object_set (G_OBJECT (sink), "position-x", POSITION_X, "position-y", POSITION_Y, NULL);
/* Add textoverlay element to display text in foreground */
g_object_set (G_OBJECT (overlay), "font-desc", "Sans, 72", NULL);
/* Add all elements into the pipeline */
/* pipeline---[ file-source + h264-parser + h264-decoder + overlay + video-output ] */
gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, overlay, sink, NULL);
/* Link the elements together */
/* file-source -> h264-parser -> h264-decoder -> overlay -> video-output */
if (gst_element_link_many (source, parser, decoder, overlay, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Retrieve a pad from waylandsink */
pad = gst_element_get_static_pad (sink, "sink");
/* Add a buffer probe so cb_have_data runs for every buffer reaching the sink */
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)cb_have_data, NULL, NULL);
gst_object_unref (pad);
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", input_file);
if (gst_element_set_state (pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
g_print ("Running...\n");
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
while (1) {
/* Poll the bus with a short timeout so the loop does not spin at 100% CPU. */
msg = gst_bus_timed_pop (bus, 100 * GST_MSECOND);
/* Loop forever until a matching message was posted
on the bus (GST_MESSAGE_ERROR or GST_MESSAGE_EOS). */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s.\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s.\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
gst_message_unref (msg);
goto stop_pipeline;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
gst_message_unref (msg);
goto stop_pipeline;
default:
/* do nothing */
break;
}
gst_message_unref (msg);
}
}
/* Free resources and change state to NULL */
stop_pipeline:
gst_object_unref (bus);
g_print ("Returned, stopping playback...\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Freeing pipeline...\n");
gst_object_unref (GST_OBJECT (pipeline));
g_print ("Completed. Goodbye!\n");
return EXIT_SUCCESS;
}
I tried the code above, but it did not seem to work.
The following example code shows how to get the FPS of xvimagesink; if you replace that sink with waylandsink, you should be able to get the FPS of waylandsink in the same way.
#include <stdio.h>
#include <string.h>
#include <time.h>
#include <gst/gst.h>
static char prv_time_str[25] = {0,};
static volatile int fps_counter = 0;
static char str_fps[32] = "";
static time_t timer;
static char time_str[25];
static struct tm* tm_info;
int ignored_first = 0;
static GstElement *pipeline, *src, *overlay, *sink;
static GstPadProbeReturn
cb_have_data (GstPad *pad,
GstPadProbeInfo *info,
gpointer user_data)
{
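/* Count buffers per wall-clock second and show the count via the textoverlay. */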
time(&timer);
tm_info = localtime(&timer);
strftime(time_str, 25, "%Y:%m:%d%H:%M:%S\n", tm_info);
fps_counter++;
if (!strlen(prv_time_str))
strcpy(prv_time_str, time_str);
if (strcmp(prv_time_str, time_str)) {
if (ignored_first) {
snprintf(str_fps, sizeof(str_fps), "FPS: %d", fps_counter);
g_object_set (G_OBJECT (overlay), "text", str_fps, NULL);
g_print("fps: %d\n", fps_counter);
}
ignored_first = 1;
fps_counter = 0;
}
strcpy(prv_time_str, time_str);
return GST_PAD_PROBE_OK;
}
gint main (gint argc, gchar *argv[])
{
GMainLoop *loop;
GstPad *pad;
/* init GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* build */
pipeline = gst_pipeline_new ("my-pipeline");
src = gst_element_factory_make ("videotestsrc", "src");
if (src == NULL)
g_error ("Could not create 'videotestsrc' element");
overlay = gst_element_factory_make ("textoverlay", "overlay");
if (overlay == NULL) {
g_error ("Could not create 'textoverlay' element");
}
g_object_set (G_OBJECT (overlay), "font-desc", "Sans, 72", NULL);
sink = gst_element_factory_make ("xvimagesink", "sink");
if (sink == NULL) {
g_error ("Could not create 'xvimagesink' element");
}
gst_bin_add_many (GST_BIN (pipeline), src, overlay, sink, NULL);
if (!gst_element_link_many (src, overlay, sink, NULL))
g_error ("Could not link src, overlay and sink");
pad = gst_element_get_static_pad (sink, "sink");
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
(GstPadProbeCallback) cb_have_data, NULL, NULL);
gst_object_unref (pad);
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
}
g_print ("Running ...\n");
g_main_loop_run (loop);
/* exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
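If waylandsink is available on your platform, the only change needed is where the sink is created. A rough sketch (the position-x/position-y properties are taken from your own code and may be platform-specific additions to waylandsink, so drop that line if your build does not have them):
/* Swap xvimagesink for waylandsink; the probe and textoverlay stay unchanged. */
sink = gst_element_factory_make ("waylandsink", "sink");
if (sink == NULL) {
  g_error ("Could not create 'waylandsink' element");
}
/* Optional, assuming your waylandsink exposes these position properties. */
g_object_set (G_OBJECT (sink), "position-x", 100, "position-y", 100, NULL);
The buffer probe does not care which sink it is attached to, since it only counts the buffers arriving on that sink pad.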