How to show GStreamer video in a Qt frameless window?

I am trying to show GStreamer video in a frameless Qt window.
The gst-launch pipeline I am reproducing in code is:
gst-launch-1.0 -v rtspsrc location=rtsp://192.168.1.15:8554/test ! rtpjitterbuffer ! rtph264depay ! avdec_h264 ! d3dvideosink sync=false
This is my first question here. My code is below. It works for a normal Qt window and shows the video received from the RTSP link, but I have two major issues:
1. When I minimize the window, it loses the video output and shows a blank screen.
2. I want to show the video in a frameless window, but if I do so, nothing is displayed.
I am using Qt 4.8.12 and GStreamer 1.4.5 on Windows 7 64-bit. Any help regarding these two issues is highly appreciated. Thanks in advance.
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QApplication>
#include <QTimer>
#include <QWidget>
#include <stdio.h>
#include "qmainwindow.h"
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data);
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data);
int main(int argc, char *argv[])
{
if (!g_thread_supported ())
g_thread_init (NULL);
/* Initialize GStreamer */
gst_init (&argc, &argv);
QApplication app(argc, argv);
app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
/* Creating Elements */
//GstElement *pipeLine = gst_pipeline_new ("xvoverlay");
QWidget window;
// QMainWindow window;
window.resize(1024,768);
WId xwinid=window.winId();
GMainLoop *loop;
GstElement *pipeLine, *rtspSrc, *rtpJitterBuffer, *rtpH264Depay, *avDecH264, *videoSink;
rtspSrc = gst_element_factory_make("rtspsrc", NULL);
rtpJitterBuffer = gst_element_factory_make("rtpjitterbuffer", NULL);
rtpH264Depay = gst_element_factory_make("rtph264depay", NULL);
avDecH264 = gst_element_factory_make("avdec_h264", NULL);
videoSink = gst_element_factory_make("d3dvideosink", NULL);
loop = g_main_loop_new (NULL, FALSE);
if (!rtspSrc || !rtpJitterBuffer || !rtpH264Depay || !avDecH264 || !videoSink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set element properties */
g_object_set( rtspSrc, "location", "rtsp://192.168.1.16:8554/test" , NULL);
g_object_set( videoSink, "sync", FALSE, NULL);
/*Initializing Pipeline*/
pipeLine = gst_pipeline_new ("TestPipeLine");
if (!pipeLine)
{
g_printerr ("Pipeline could not be created.");
}
/* we add a message handler */
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeLine));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/*Adding Components to the pipeline */
gst_bin_add_many (GST_BIN(pipeLine),
rtspSrc,
rtpJitterBuffer,
rtpH264Depay,
avDecH264,
videoSink,
NULL);
/* if (gst_element_link (rtspSrc, rtpJitterBuffer) != TRUE)
{
g_printerr ("rtspSrc & rtpJitterBuffer could not be linked.\n");
gst_object_unref (pipeLine);
return -1;
}
*/
if (gst_element_link (rtpJitterBuffer, rtpH264Depay) != TRUE)
{
g_printerr ("rtpJitterBuffer and rtpH264Depay could not be linked.\n");
gst_object_unref (pipeLine);
return -1;
}
if (gst_element_link (rtpH264Depay, avDecH264) != TRUE)
{
g_printerr ("rtpH264Depay and avDecH264 could not be linked.\n");
gst_object_unref (pipeLine);
return -1;
}
if (gst_element_link (avDecH264, videoSink) != TRUE)
{
g_printerr ("avDecH264 and videoSink could not be linked.\n");
gst_object_unref (pipeLine);
return -1;
}
g_signal_connect (rtspSrc, "pad-added", G_CALLBACK (on_pad_added), rtpJitterBuffer);
window.setWindowFlags(Qt::FramelessWindowHint);
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY(videoSink), guintptr(xwinid));
window.show();
/* Set the pipeline to "playing" state*/
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeLine, GST_STATE_PLAYING);
app.exec();
/* NOTE: app.exec() blocks until the window closes; the GLib loop below
 * only runs afterwards, so bus messages are not handled during playback. */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeLine, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeLine));
return 0;
}
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
GstElement *downstream = (GstElement *) data;
/* Link the dynamic rtspsrc pad to the jitter buffer's sink pad */
g_print ("Dynamic pad created, linking rtspsrc -> rtpjitterbuffer\n");
sinkpad = gst_element_get_static_pad (downstream, "sink");
if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
g_printerr ("Failed to link the dynamic pad\n");
gst_object_unref (sinkpad);
}
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}

If you switch back to using QWindow, this will work:
window.setWindowFlags (Qt::FramelessWindowHint);
I'm not sure of the exact mechanism, but something similar should be available for QWidget.
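A likely cause of the frameless problem (an assumption based on how Qt handles native windows, not verified against this exact code): calling setWindowFlags() after winId() destroys and recreates the native window, so the handle already passed to gst_video_overlay_set_window_handle() goes stale. Setting the flag before taking winId(), and re-exposing the overlay whenever the widget repaints (which may also help with the blank screen after minimize), would look roughly like this; VideoWidget and its sink member are hypothetical additions:

/* Sketch (untested): QWidget subclass that re-exposes the video overlay
 * on every repaint, e.g. after the window is restored from minimize. */
class VideoWidget : public QWidget
{
public:
    VideoWidget() : sink(NULL) {}
    GstElement *sink; /* the d3dvideosink, assigned after pipeline setup */
protected:
    void paintEvent(QPaintEvent *)
    {
        if (sink) /* ask the sink to redraw the last frame */
            gst_video_overlay_expose(GST_VIDEO_OVERLAY(sink));
    }
};

/* In main(): order matters, take winId() only after the final flags are set */
VideoWidget window;
window.setWindowFlags(Qt::FramelessWindowHint); /* before winId() */
window.resize(1024, 768);
WId xwinid = window.winId(); /* handle now refers to the final native window */
/* ... build the pipeline as above ... */
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(videoSink), guintptr(xwinid));
window.sink = videoSink;
window.show();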

Related

How to use Filesink in Deepstream test application to save video

Hello everyone. My question relates to the NVIDIA DeepStream 5.0 SDK.
I am trying to run the sample app deepstream-nvdsanalytics-test, which is in "/source/apps/sample_apps" in the NVIDIA DeepStream container, and I want to save the output video to a file using filesink. I was advised to look at the create_encode_file_bin function in "/source/apps/apps-common/src/deepstream_sink_bin.c".
I tried changing deepstream_nvdsanalytics_test.cpp with create_encode_file_bin as a reference, but got some errors. My pipeline, edited code, and the error are below.
Pipeline used:
pgie -> nvtracker -> nvdsanalytics -> tiler -> nvvidconv -> nvosd -> nvvideoconvert -> capsfilter (video/x-raw) -> encoder -> codecparse -> mux -> filesink
Error:
(deepstream-nvdsanalytics-test:203): GStreamer-WARNING **: 16:08:13.115: Name ‘nvvideo-converter’ is not unique in bin ‘nvdsanalytics-test-pipeline’, not adding
(deepstream-nvdsanalytics-test:203): GStreamer-CRITICAL **: 16:08:13.116: gst_element_link_pads_full: assertion ‘GST_IS_ELEMENT (dest)’ failed
Elements could not be linked. Exiting.
Code:
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <iostream>
#include <vector>
#include <unordered_map>
#include "gstnvdsmeta.h"
#include "nvds_analytics_meta.h"
#include "deepstream_config.h"
#ifndef PLATFORM_TEGRA
#include "gst-nvmessage.h"
#endif
[....]
int
main (int argc, char *argv[])
{
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL,
*nvtracker = NULL, *nvdsanalytics = NULL,
*nvvidconv = NULL, *nvosd = NULL, *nvvidconv1 = NULL, *transform1 = NULL, *cap_filter = NULL, *encoder = NULL, *codecparse = NULL, *mux = NULL, *tiler = NULL;
GstCaps *caps = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *nvdsanalytics_src_pad = NULL;
guint i, num_sources;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
gulong bitrate = 2000000;
guint profile = 0;
/* Check input arguments */
if (argc < 2) {
g_printerr ("Usage: %s <uri1> [uri2] ... [uriN] \n", argv[0]);
return -1;
}
num_sources = argc - 1;
/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new ("nvdsanalytics-test-pipeline");
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
if (!pipeline || !streammux) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
gst_bin_add (GST_BIN (pipeline), streammux);
for (i = 0; i < num_sources; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = { };
GstElement *source_bin = create_source_bin (i, argv[i + 1]);
if (!source_bin) {
g_printerr ("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add (GST_BIN (pipeline), source_bin);
g_snprintf (pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad (streammux, pad_name);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (source_bin, "src");
if (!srcpad) {
g_printerr ("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (srcpad);
gst_object_unref (sinkpad);
}
/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
/* Use nvtracker to track detections on batched frame. */
nvtracker = gst_element_factory_make ("nvtracker", "nvtracker");
/* Use nvdsanalytics to perform analytics on object */
nvdsanalytics = gst_element_factory_make ("nvdsanalytics", "nvdsanalytics");
/* Use nvtiler to composite the batched frames into a 2D tiled array based
* on the source of the frames. */
tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");
/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
if (!nvvidconv) {
g_printerr ("nvvdiconv element could not be created. Exiting.\n");
}
/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
if (!nvosd) {
g_printerr ("nvosd element could not be created. Exiting.\n");
}
/* converter to convert RGBA to NV12 */
nvvidconv1 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter1");
if (!nvvidconv1) {
g_printerr ("nvvidconv1 element could not be created. Exiting.\n");
}
/*create cap_filter */
cap_filter = gst_element_factory_make (NVDS_ELEM_CAPS_FILTER, "cap_filter");
if (!cap_filter) {
g_printerr ("cap_filter element could not be created. Exiting.\n");
}
/* create cap for filter */
caps = gst_caps_from_string ("video/x-raw, format=I420");
g_object_set (G_OBJECT (cap_filter), "caps", caps, NULL);
/* create encoder */
encoder = gst_element_factory_make (NVDS_ELEM_ENC_H264_HW, "encoder");
if (!encoder) {
g_printerr ("encoder element could not be created. Exiting.\n");
}
/* create transform1 */
transform1 = gst_element_factory_make (NVDS_ELEM_VIDEO_CONV, "transform1");
g_object_set (G_OBJECT (transform1), "gpu-id", 0, NULL);
if (!transform1) {
g_printerr ("transform1 element could not be created. Exiting.\n");
}
#ifdef IS_TEGRA
g_object_set (G_OBJECT (encoder), "bufapi-version", 1, NULL);
#endif
g_object_set (G_OBJECT (encoder), "profile", profile, NULL);
g_object_set (G_OBJECT (encoder), "bitrate", bitrate, NULL);
/* create codecparse */
codecparse = gst_element_factory_make ("h264parse", "h264-parser");
if (!codecparse) {
g_printerr ("codecparse element could not be created. Exiting.\n");
}
/* create mux */
mux = gst_element_factory_make (NVDS_ELEM_MUX_MP4, "mux");
if (!mux) {
g_printerr ("mux element could not be created. Exiting.\n");
}
/* create sink */
sink = gst_element_factory_make (NVDS_ELEM_SINK_FILE, "filesink");
if (!sink) {
g_printerr ("sink element could not be created. Exiting.\n");
}
g_object_set (G_OBJECT (sink), "location", "capture.mp4", "sync", 0, "async" , FALSE, NULL);
// /* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
// sink = gst_element_factory_make (NVDS_ELEM_SINK_FILE, "filesink");
// g_object_set (G_OBJECT (sink), "location", "capture.mp4", "sync", 0, "async" , FALSE, NULL);
if (!pgie || !nvtracker || !nvdsanalytics || !nvvidconv ||
!nvosd || !nvvidconv1 || !cap_filter || !encoder || !codecparse || !mux || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr ("One tegra element could not be created. Exiting.\n");
return -1;
}
#endif
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", num_sources,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie),
"config-file-path", "nvdsanalytics_pgie_config.txt", NULL);
/* Configure the nvtracker element for using the particular tracker algorithm. */
g_object_set (G_OBJECT (nvtracker),
"ll-lib-file", "/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so",
"ll-config-file", "tracker_config.yml", "tracker-width", 640, "tracker-height", 480,
NULL);
/* Configure the nvdsanalytics element for using the particular analytics config file*/
g_object_set (G_OBJECT (nvdsanalytics),
"config-file", "config_nvdsanalytics.txt",
NULL);
/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), "batch-size", &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr
("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n",
pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), "batch-size", num_sources, NULL);
}
tiler_rows = (guint) sqrt (num_sources);
tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set (G_OBJECT (tiler), "rows", tiler_rows, "columns", tiler_columns,
"width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics ,
nvvidconv, nvosd, nvvidconv1, cap_filter, encoder, codecparse, mux, sink,
NULL);
/* we link the elements together:
 * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics ->
 * nvvideoconvert -> nvosd -> nvvideoconvert -> capsfilter ->
 * encoder -> h264parse -> mux -> filesink
 */
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics,
nvvidconv, nvosd, nvvidconv1, cap_filter, encoder, codecparse, mux, sink, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
#else
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics,
nvvidconv, nvosd, nvvidconv1, cap_filter, encoder, codecparse, mux, sink, NULL);
/* we link the elements together:
 * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics ->
 * nvvideoconvert -> nvosd -> nvvideoconvert -> capsfilter ->
 * encoder -> h264parse -> mux -> filesink
 */
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics,
nvvidconv, nvosd, nvvidconv1, cap_filter, encoder, codecparse, mux, sink, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
#endif
/* Let's add a probe to get informed of the generated metadata; we add the
 * probe to the src pad of the nvdsanalytics element, since by that time
 * the buffer will carry all the metadata.
 */
nvdsanalytics_src_pad = gst_element_get_static_pad (nvdsanalytics, "src");
if (!nvdsanalytics_src_pad)
g_print ("Unable to get src pad\n");
else
gst_pad_add_probe (nvdsanalytics_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
nvdsanalytics_src_pad_buffer_probe, NULL, NULL);
/* Set the pipeline to "playing" state */
g_print ("Now playing:");
for (i = 0; i < num_sources; i++) {
g_print (" %s,", argv[i + 1]);
}
g_print ("\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
Please let me know if any other information is required from my side. Thank you in advance.
Modify the pipeline as per the git diff below (note: the diff is for the deepstream-test2 app, but it will work for deepstream-nvdsanalytics-test as well):
diff --git a/deepstream-test2/deepstream_test2_app.c b/deepstream-test2/deepstream_test2_app.c
index 2b1ff34..c31441e 100644
--- a/deepstream-test2/deepstream_test2_app.c
+++ b/deepstream-test2/deepstream_test2_app.c
@@ -318,6 +318,16 @@ main (int argc, char *argv[])
GstBus *bus = NULL;
guint bus_watch_id = 0;
GstPad *osd_sink_pad = NULL;
+
+ /* Added to save output to file */
+ GstElement *nvvidconv1 = NULL,
+ *filter1 = NULL, *filter2 = NULL,
+ *filter3 = NULL,
+ *videoconvert = NULL,
+ *filter4 = NULL,
+ *x264enc = NULL,
+ *qtmux = NULL;
+ GstCaps *caps1 = NULL, *caps2 = NULL, *caps3 = NULL, *caps4 = NULL;
/* Check input arguments */
if (argc != 2) {
@@ -373,17 +383,35 @@ main (int argc, char *argv[])
/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
+ /* Added to save output to file */
+ nvvidconv1 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter1");
+ videoconvert = gst_element_factory_make ("videoconvert", "converter");
+ x264enc = gst_element_factory_make ("x264enc", "h264 encoder");
+ qtmux = gst_element_factory_make ("qtmux", "muxer");
+
/* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
- sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
+ sink = gst_element_factory_make ("filesink", "nvvideo-renderer");
+
+ /* caps filter for nvvidconv to convert NV12 to RGBA as nvosd expects input
+ * in RGBA format */
+ filter1 = gst_element_factory_make ("capsfilter", "filter1");
+ filter2 = gst_element_factory_make ("capsfilter", "filter2");
+ filter3 = gst_element_factory_make ("capsfilter", "filter3");
+ filter4 = gst_element_factory_make ("capsfilter", "filter4");
if (!source || !h264parser || !decoder || !pgie ||
!nvtracker || !sgie1 || !sgie2 || !sgie3 || !nvvidconv || !nvosd || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
+ /* Added to test saving output to file */
+ if (!nvvidconv1 || !x264enc || !qtmux || !filter3 || !filter4) {
+ g_printerr ("One element could not be created. Exiting.\n");
+ return -1;
+ }
#ifdef PLATFORM_TEGRA
if(!transform) {
@@ -395,6 +423,9 @@ main (int argc, char *argv[])
/* Set the input filename to the source element */
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
+ /* Added to save output to file */
+ g_object_set (G_OBJECT (sink), "location", "out.mp4", NULL);
+
g_object_set (G_OBJECT (streammux), "batch-size", 1, NULL);
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
@@ -429,9 +460,24 @@ main (int argc, char *argv[])
#else
gst_bin_add_many (GST_BIN (pipeline),
source, h264parser, decoder, streammux, pgie, nvtracker, sgie1, sgie2, sgie3,
- nvvidconv, nvosd, sink, NULL);
+ filter1, nvvidconv, filter2, nvosd, nvvidconv1, filter3, videoconvert, filter4,
+ x264enc, qtmux, sink, NULL);
#endif
+ /* Added to save output to file */
+ caps1 = gst_caps_from_string ("video/x-raw(memory:NVMM), format=NV12");
+ g_object_set (G_OBJECT (filter1), "caps", caps1, NULL);
+ gst_caps_unref (caps1);
+ caps2 = gst_caps_from_string ("video/x-raw(memory:NVMM), format=RGBA");
+ g_object_set (G_OBJECT (filter2), "caps", caps2, NULL);
+ gst_caps_unref (caps2);
+ caps3 = gst_caps_from_string ("video/x-raw, format=RGBA");
+ g_object_set (G_OBJECT (filter3), "caps", caps3, NULL);
+ gst_caps_unref (caps3);
+ caps4 = gst_caps_from_string ("video/x-raw, format=NV12");
+ g_object_set (G_OBJECT (filter4), "caps", caps4, NULL);
+ gst_caps_unref (caps4);
+
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
@@ -470,7 +516,8 @@ main (int argc, char *argv[])
}
#else
if (!gst_element_link_many (streammux, pgie, nvtracker, sgie1,
- sgie2, sgie3, nvvidconv, nvosd, sink, NULL)) {
+ sgie2, sgie3, filter1, nvvidconv, filter2, nvosd, nvvidconv1, filter3,
+ videoconvert, filter4, x264enc, qtmux, sink, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
For deepstream-nvdsanalytics-test, the pipeline would be:
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, tiler,
filter1, nvvidconv, filter2, nvosd, nvvidconv1, filter3, videoconvert, filter4,
x264enc, qtmux, sink, NULL);
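Presumably the matching link call (my sketch, simply mirroring the add list above; it is not taken from the original diff) would be:

if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
    filter1, nvvidconv, filter2, nvosd, nvvidconv1, filter3, videoconvert, filter4,
    x264enc, qtmux, sink, NULL)) {
  g_printerr ("Elements could not be linked. Exiting.\n");
  return -1;
}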

FFmpeg doesn't use GPU

I got the latest ffmpeg binaries from here. When I examine CPU and GPU usage while playing a video with its ffplay, I can see that the GPU is used during playback; the low CPU usage also indicates this. But when I build the latest sources from the official site, I can't use the GPU. To clarify, I am including the player test program I have written so far. When I uncomment the line that uses avcodec_find_decoder_by_name("h264_cuvid"), I get error -1. The error happens in avcodec_open2, with the description "Operation not permitted".
CString format(const char *fmt, ...)
{
va_list ap;
va_start(ap, fmt);
char buf[512];
vsnprintf(buf, sizeof(buf), fmt, ap);
va_end(ap);
return buf;
}
int CplayerDlg::play()
{
FILE *fp = fopen("video_files/1010.brf", "rb");
if (!fp)
{
AfxMessageBox("can't open video file");
return -1;
}
RecordFrame frame;
RecordHeader hdr;
fread(&frame, sizeof(frame), 1, fp);
if (frame.frameType != FRAME_TYPE_HEADER)
{
AfxMessageBox("record file doesn't begin with header");
return -1;
}
fread(&hdr, sizeof(hdr), 1, fp);
GetDlgItem(IDC_DIM)->SetWindowText(format("%dx%d", hdr.width, hdr.height));
GetDlgItem(IDC_CODEC_ID)->SetWindowText(format("%d", hdr.codecId));
GetDlgItem(IDC_PIXEL_FORMAT)->SetWindowText(format("%d", hdr.pixelFormat));
GetDlgItem(IDC_TIMEBASE)->SetWindowText(format("%d/%d", hdr.timebaseNum, hdr.timebaseDen));
AVCodec *pCodec;
#if 0
#define CHECK(decoder)\
pCodec = avcodec_find_decoder_by_name(#decoder);\
AfxMessageBox(pCodec ? #decoder " found" : "can't find " #decoder);
CHECK(h264_cuvid);
#undef CHECK
#endif
pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
//pCodec = avcodec_find_decoder_by_name("h264_cuvid");
if (!pCodec)
{
AfxMessageBox("can't find h264 decoder");
return -1;
}
AVCodecContext *pCodecContext = avcodec_alloc_context3(pCodec);
if (!pCodecContext)
{
AfxMessageBox("can't allocate codec context");
return -1;
}
#if 0
// enumerating available codecs
//av_register_all();
avcodec_register_all();
AVCodec *current_codec = av_codec_next(NULL);
while (current_codec != NULL)
{
TRACE("%s\n", current_codec->name);
current_codec = av_codec_next(current_codec);
}
#endif
int err = avcodec_open2(pCodecContext, pCodec, NULL);
if (err != 0)
{
char buf[AV_ERROR_MAX_STRING_SIZE];
av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, err);
char buf2[AV_ERROR_MAX_STRING_SIZE];
sprintf(buf2, "%d (%x): %s\n", err, err, buf);
AfxMessageBox(buf2);
return -1;
}
AfxMessageBox("operation completed successfully");
fclose(fp);
return 0;
}
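For what it's worth, error -1 from avcodec_open2 is AVERROR(EPERM) ("Operation not permitted"), which with h264_cuvid usually points at a build without CUVID support or a missing/unusable NVIDIA driver (an assumption worth verifying: check ffmpeg -decoders for h264_cuvid, and whether the build was configured with --enable-cuvid and the NVIDIA codec headers). A fallback sketch (my own code, using only the APIs already used above plus av_strerror) that keeps the player working while reporting why the hardware path failed:

/* Sketch: prefer h264_cuvid, report the exact error, fall back to software. */
AVCodec *pCodec = avcodec_find_decoder_by_name("h264_cuvid");
AVCodecContext *pCtx = NULL;
int err = -1;
if (pCodec)
{
    pCtx = avcodec_alloc_context3(pCodec);
    err = avcodec_open2(pCtx, pCodec, NULL);
    if (err != 0)
    {
        char msg[AV_ERROR_MAX_STRING_SIZE];
        av_strerror(err, msg, sizeof(msg));
        TRACE("h264_cuvid failed: %s, falling back to software decoding\n", msg);
        avcodec_free_context(&pCtx);
    }
}
if (err != 0)
{
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    pCtx = avcodec_alloc_context3(pCodec);
    err = avcodec_open2(pCtx, pCodec, NULL);
}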

How to determine font used in dialog window

How can I determine the font used for a control in a dialog window of a running process on Windows? Something like what Microsoft Spy++ does.
I did not find this functionality in Spy++, but here's a small program that I just wrote for this task:
#include <windows.h>
#include <stdio.h>
int main(int argc, char **argv)
{
if (argc != 2) {
fprintf(stderr, "Usage: findfont WINDOWTITLE\n");
return 1;
}
LPCSTR title = argv[1];
HWND hWnd = FindWindow(NULL, title);
if (hWnd == NULL) {
fprintf(stderr, "Window titled \"%s\" not found\n", title);
return 1;
}
HFONT hFont = (HFONT) SendMessage(hWnd, WM_GETFONT, 0, 0);
if (hFont == NULL) {
/* WM_GETFONT returns NULL when the control uses the default system font */
fprintf(stderr, "Window uses the default system font\n");
return 1;
}
LOGFONT lf = { 0 };
if (!GetObject(hFont, sizeof(LOGFONT), &lf)) {
fprintf(stderr, "GetObject failed\n");
return 1;
}
printf("Face name: %s Height: %ld\n", lf.lfFaceName, lf.lfHeight);
return 0;
}
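Note that FindWindow only matches top-level windows, so the program reports the dialog's own font; to inspect an individual control you can enumerate the children. A sketch (ANSI build assumed, matching the code above; the callback name is my own):

/* Sketch: print the font of every child control of the found window. */
static BOOL CALLBACK PrintChildFont(HWND hChild, LPARAM lParam)
{
    char cls[64];
    LOGFONT lf = { 0 };
    GetClassNameA(hChild, cls, sizeof(cls));
    HFONT hFont = (HFONT) SendMessage(hChild, WM_GETFONT, 0, 0);
    if (hFont != NULL && GetObject(hFont, sizeof(LOGFONT), &lf))
        printf("%s: %s %ld\n", cls, lf.lfFaceName, lf.lfHeight);
    else
        printf("%s: default system font\n", cls);
    return TRUE; /* keep enumerating */
}
/* usage, after FindWindow succeeds: EnumChildWindows(hWnd, PrintChildFont, 0); */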

Possible to capture unhandled exception in win32 user application ? (setunhandledexceptionfilter())

I spent a lot of time trying to capture unhandled exceptions in my Win32 process using the API SetUnhandledExceptionFilter().
But my filter is not called when WER (Windows Error Reporting, well known as Dr. Watson) appears.
Is it impossible to catch all exceptions in my app without third-party code?
I think there must be a method for handling this, but I haven't found it.
I am not accustomed to the Windows development environment, which is why I got lost googling.
Below is my test case in VC110 (Visual Studio 2012).
char test[65];
int main() {
// after installing the unhandled-exception callback via SetUnhandledExceptionFilter()
// dies here (ACCESS_VIOLATION c0000005)
for (int k=0; k<1000000; k++)
test[k]=65;
}
My callback is not called once WER (Windows Error Reporting) appears; it does not work as I intended.
But strcpy(NULL, "TEST") is caught fine (SUCCESS).
Below is my source code.
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/stat.h>
#include <assert.h>
#include <process.h>
#include <direct.h>
#include <conio.h>
#include <time.h>
#include <Windows.h>
#include <tchar.h>
#include <dbghelp.h>
#include <stdio.h>
#include <crtdbg.h>
#include <WinBase.h>
#pragma comment ( lib, "dbghelp.lib" )
void CreateMiniDump( EXCEPTION_POINTERS* pep );
BOOL CALLBACK MyMiniDumpCallback(
PVOID pParam,
const PMINIDUMP_CALLBACK_INPUT pInput,
PMINIDUMP_CALLBACK_OUTPUT pOutput
);
///////////////////////////////////////////////////////////////////////////////
// Minidump creation function
//
#if 0
LONG WINAPI lpTopLevelExceptionFilter(EXCEPTION_POINTERS* ExceptionInfo);
#endif
void CreateMiniDump( EXCEPTION_POINTERS* pep )
{
time_t t;
struct tm *tinfo;
wchar_t dump_name[128];
HANDLE hFile;
time(&t);
tinfo = localtime(&t);
wcsftime(dump_name, 128, L"MiniDump[%Y%m%d][%H_%M_%S].dmp", tinfo);
// file format MiniDump[YYYYMMDD][HH_MM_SEC]
/* dump_name is wchar_t[], so call the wide-char API explicitly */
hFile = CreateFileW(dump_name, GENERIC_READ | GENERIC_WRITE,
0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL );
if( ( hFile != NULL ) && ( hFile != INVALID_HANDLE_VALUE ) )
{
// Create the minidump
MINIDUMP_EXCEPTION_INFORMATION mdei;
MINIDUMP_CALLBACK_INFORMATION mci;
MINIDUMP_TYPE mdt;
BOOL rv;
mdei.ThreadId = GetCurrentThreadId();
mdei.ExceptionPointers = pep;
mdei.ClientPointers = FALSE;
mci.CallbackRoutine = (MINIDUMP_CALLBACK_ROUTINE)MyMiniDumpCallback;
mci.CallbackParam = 0;
mdt = (MINIDUMP_TYPE)(MiniDumpWithIndirectlyReferencedMemory | MiniDumpScanMemory| MiniDumpWithThreadInfo);
rv = MiniDumpWriteDump( GetCurrentProcess(), GetCurrentProcessId(),
hFile, mdt, (pep != 0) ? &mdei : 0, 0, &mci );
if( !rv )
_tprintf( _T("MiniDumpWriteDump failed. Error: %u \n"), GetLastError() );
else
_tprintf( _T("Minidump created.\n") );
// Close the file
CloseHandle( hFile );
}
else
{
_tprintf( _T("CreateFile failed. Error: %u \n"), GetLastError() );
}
}
///////////////////////////////////////////////////////////////////////////////
// Custom minidump callback
//
BOOL CALLBACK MyMiniDumpCallback(
PVOID pParam,
const PMINIDUMP_CALLBACK_INPUT pInput,
PMINIDUMP_CALLBACK_OUTPUT pOutput
)
{
BOOL bRet = FALSE;
// Check parameters
if( pInput == 0 )
return FALSE;
if( pOutput == 0 )
return FALSE;
// Process the callbacks
switch( pInput->CallbackType )
{
case IncludeModuleCallback:
{
// Include the module into the dump
bRet = TRUE;
}
break;
case IncludeThreadCallback:
{
// Include the thread into the dump
bRet = TRUE;
}
break;
case ModuleCallback:
{
// Does the module have ModuleReferencedByMemory flag set ?
if( !(pOutput->ModuleWriteFlags & ModuleReferencedByMemory) )
{
// No, it does not - exclude it
wprintf( L"Excluding module: %s \n", pInput->Module.FullPath );
pOutput->ModuleWriteFlags &= (~ModuleWriteModule);
}
bRet = TRUE;
}
break;
case ThreadCallback:
{
// Include all thread information into the minidump
bRet = TRUE;
}
break;
case ThreadExCallback:
{
// Include this information
bRet = TRUE;
}
break;
case MemoryCallback:
{
// We do not include any information here -> return FALSE
bRet = FALSE;
}
break;
case CancelCallback:
break;
}
return bRet;
}
static DWORD WINAPI MiniDumpThreadProc(LPVOID p)
{
/* thread entry with the proper LPTHREAD_START_ROUTINE signature */
CreateMiniDump((EXCEPTION_POINTERS *)p);
return 0;
}
LONG WINAPI exception_filter_func(EXCEPTION_POINTERS* pep)
{
if (pep == NULL) {
return EXCEPTION_EXECUTE_HANDLER;
}
if (pep->ExceptionRecord->ExceptionCode == EXCEPTION_STACK_OVERFLOW) {
/* the faulting thread's stack is exhausted, so dump from a fresh thread */
HANDLE hThread = CreateThread(NULL, 0, MiniDumpThreadProc, pep, 0, NULL);
WaitForSingleObject(hThread, INFINITE);
CloseHandle(hThread);
} else {
CreateMiniDump(pep);
}
return EXCEPTION_EXECUTE_HANDLER;
}
char test[65];
int main(int argc, char **argv)
{
int k;
SetUnhandledExceptionFilter(exception_filter_func);
// exception occurs here (ACCESS_VIOLATION)
for (k=0; k<1000000; k++)
test[k]=65;
}
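Two things worth checking (assumptions from general Win32 behavior, not verified against this exact setup): the filter installed by SetUnhandledExceptionFilter is bypassed when a debugger is attached, so the test has to run outside Visual Studio; and the WER fault dialog can be suppressed with SetErrorMode so that the installed filter is the last handler standing:

/* Sketch: suppress the WER fault dialog before installing the filter.
 * SEM_NOGPFAULTERRORBOX is a standard SetErrorMode flag. */
SetErrorMode(SetErrorMode(0) | SEM_NOGPFAULTERRORBOX);
SetUnhandledExceptionFilter(exception_filter_func);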

Gstreamer basic tutorial gives blank screen

I am following the GStreamer basic tutorial below. When I compile and run the program, I only get a blank screen. The same happens with mplayer unless I pass it the -vo x11 option, in which case it works (for mplayer). So I am guessing the issue with GStreamer is the same. My question is: what should I do to tell the program that my output driver is X11? How do I get rid of the blank screen?
Thanks
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Build the pipeline */
/*
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
*/
pipeline = gst_parse_launch ("playbin2 uri=file:///home/mylogin/gstreamerstuff/sintel_trailer-480p.webm", NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
Try adding this code before setting the pipeline to playing:
{
GstElement* vsink = gst_element_factory_make ("ximagesink", NULL);
g_object_set (G_OBJECT (pipeline), "video-sink", vsink, NULL);
}
The above code makes sure that the pipeline uses the GstElement called ximagesink. This element (a sink) is X11-based and renders the incoming buffers to the screen using the X11 APIs.
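If the screen is still blank, printing the error the tutorial otherwise throws away usually tells you which sink or decoder failed. A sketch using the tutorial's own msg variable, placed before the unref:

/* Sketch: report the bus error instead of silently freeing it. */
if (msg != NULL && GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *dbg = NULL;
    gst_message_parse_error (msg, &err, &dbg);
    g_printerr ("Error: %s (%s)\n", err->message, dbg ? dbg : "no debug info");
    g_error_free (err);
    g_free (dbg);
}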
