Initial vdpau hw decoder support.

This commit is contained in:
Johns 2011-12-23 19:33:30 +01:00
parent 1e18da47f7
commit 06fddc206e
7 changed files with 431 additions and 204 deletions

View File

@ -1,6 +1,9 @@
User johns
Date:
Release Version 0.1.1
Initial VDPAU decoder support.
Initial VDPAU output support.
Configurable audio delay.
Make pts monotonic.
Support old libav and ffmpeg libs.

Todo
View File

@ -1,15 +1,16 @@
missing:
    video out with xv
    video out with opengl
-   video out with vdpau
-   software decoder
-   vdpau decoder
+   software decoder for xv / opengl
    software deinterlace
    auto crop
    atmolight
    zoom/fit-zoom 4:3
    multistream handling
+vdpau:
+   1080i with temporal spatial too slow GT 520
libva-intel-driver:
    intel still has hangups most with 1080i
    1080i does no v-sync (workaround written)

codec.c
View File

@ -40,6 +40,9 @@
#include <alsa/iatomic.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/vaapi.h>
+#ifdef USE_VDPAU
+#include <libavcodec/vdpau.h>
+#endif

#ifdef MAIN_H
#include MAIN_H
@ -67,6 +70,7 @@ struct _video_decoder_
{
    VideoHwDecoder *HwDecoder;          ///< video hardware decoder
+   int GetFormatDone;                  ///< flag get format called!
    AVCodec *VideoCodec;                ///< video codec
    AVCodecContext *VideoCtx;           ///< video codec context
    AVFrame *Frame;                     ///< decoded video frame
@ -76,8 +80,6 @@ struct _video_decoder_
//  Call-backs
//----------------------------------------------------------------------------

-static int CodecFfmpegOk;              ///< check ffmpeg idiotics

/**
**  Callback to negotiate the PixelFormat.
**
@ -92,7 +94,7 @@ static enum PixelFormat Codec_get_format(AVCodecContext * video_ctx,
    decoder = video_ctx->opaque;
    Debug(3, "codec: %s: %18p\n", __FUNCTION__, decoder);
-   CodecFfmpegOk = 1;
+   decoder->GetFormatDone = 1;
    return Video_get_format(decoder->HwDecoder, video_ctx, fmt);
}
@ -106,10 +108,14 @@ static enum PixelFormat Codec_get_format(AVCodecContext * video_ctx,
*/
static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
{
-   if (!CodecFfmpegOk) {               // get_format missing
+   VideoDecoder *decoder;
+
+   decoder = video_ctx->opaque;
+   if (!decoder->GetFormatDone) {      // get_format missing
        enum PixelFormat fmts[2];

        fprintf(stderr, "codec: buggy ffmpeg\n");
+       Warning(_("codec: buggy ffmpeg\n"));
        fmts[0] = video_ctx->pix_fmt;
        fmts[1] = PIX_FMT_NONE;
        Codec_get_format(video_ctx, fmts);
@ -118,11 +124,12 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
    if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
            && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
        || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) {
-       VideoDecoder *decoder;
        unsigned surface;
+       struct vdpau_render_state *vrs;

-       decoder = video_ctx->opaque;
        surface = VideoGetSurface(decoder->HwDecoder);
+       vrs = av_calloc(1, sizeof(struct vdpau_render_state));
+       vrs->surface = surface;

        //Debug(3, "codec: use surface %#010x\n", surface);
@ -130,7 +137,11 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
#if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,46,0)
        frame->age = 256 * 256 * 256 * 64;
#endif
-       frame->data[0] = (void *)(size_t) surface;
+       // render
+       frame->data[0] = (void *)vrs;
+       frame->data[1] = NULL;
+       frame->data[2] = NULL;
+       frame->data[3] = NULL;

        // reordered frames
        if (video_ctx->pkt) {
@ -142,10 +153,8 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
    }
    // VA-API:
    if (video_ctx->hwaccel_context) {
-       VideoDecoder *decoder;
        unsigned surface;

-       decoder = video_ctx->opaque;
        surface = VideoGetSurface(decoder->HwDecoder);

        //Debug(3, "codec: use surface %#010x\n", surface);
@ -184,15 +193,19 @@ static void Codec_release_buffer(AVCodecContext * video_ctx, AVFrame * frame)
            && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
        || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) {
        VideoDecoder *decoder;
+       struct vdpau_render_state *vrs;
        unsigned surface;

        decoder = video_ctx->opaque;
-       surface = (unsigned)(size_t) frame->data[0];
+       vrs = (struct vdpau_render_state *)frame->data[0];
+       surface = vrs->surface;

        //Debug(3, "codec: release surface %#010x\n", surface);
        VideoReleaseSurface(decoder->HwDecoder, surface);

-       frame->data[0] = NULL;
+       av_freep(&vrs->bitstream_buffers);
+       vrs->bitstream_buffers_allocated = 0;
+       av_freep(&frame->data[0]);

        return;
    }
@ -216,6 +229,45 @@ static void Codec_release_buffer(AVCodecContext * video_ctx, AVFrame * frame)
    return avcodec_default_release_buffer(video_ctx, frame);
}
/**
** Draw a horizontal band.
**
** @param video_ctx Codec context
** @param frame draw this frame
** @param y y position of slice
** @param type 1->top field, 2->bottom field, 3->frame
** @param offset offset into AVFrame.data from which slice
** should be read
** @param height height of slice
*/
static void Codec_draw_horiz_band(AVCodecContext * video_ctx,
const AVFrame * frame, __attribute__ ((unused))
int offset[AV_NUM_DATA_POINTERS], __attribute__ ((unused))
int y, __attribute__ ((unused))
int type, __attribute__ ((unused))
int height)
{
// VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4
if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
&& video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
|| video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) {
VideoDecoder *decoder;
struct vdpau_render_state *vrs;
//unsigned surface;
decoder = video_ctx->opaque;
vrs = (struct vdpau_render_state *)frame->data[0];
//surface = vrs->surface;
//Debug(3, "codec: draw slice surface %#010x\n", surface);
//Debug(3, "codec: %d references\n", vrs->info.h264.num_ref_frames);
VideoDrawRenderState(decoder->HwDecoder, vrs);
}
return;
}
//----------------------------------------------------------------------------
//  Test
//----------------------------------------------------------------------------
@ -255,7 +307,7 @@ void CodecVideoOpen(VideoDecoder * decoder, const char *name, int codec_id)
    //
    //  ffmpeg compatibility hack
    //
-#if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(52,96,0)
+#if 1 || (LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(52,96,0))
    if (name) {
        if (!strcmp(name, "h264video_vdpau")) {
            name = "h264_vdpau";
@ -330,7 +382,11 @@ void CodecVideoOpen(VideoDecoder * decoder, const char *name, int codec_id)
        decoder->VideoCtx->get_buffer = Codec_get_buffer;
        decoder->VideoCtx->release_buffer = Codec_release_buffer;
        decoder->VideoCtx->reget_buffer = Codec_get_buffer;
-       //decoder->VideoCtx->draw_horiz_band = Codec_draw_horiz_band;
+       decoder->VideoCtx->draw_horiz_band = Codec_draw_horiz_band;
+       decoder->VideoCtx->slice_flags =
+           SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
+       decoder->VideoCtx->thread_count = 1;
+       decoder->VideoCtx->active_thread_type = 0;
    } else {
        decoder->VideoCtx->hwaccel_context =
            VideoGetVaapiContext(decoder->HwDecoder);
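
For orientation, a minimal sketch (not part of the commit) of where the data built by the callbacks above ends up on the legacy libavcodec VDPAU path. The helper name sketch_submit is illustrative; the types and the render call follow the code in this diff (compare VideoDrawRenderState in video.c below).

#include <vdpau/vdpau.h>
#include <libavcodec/vdpau.h>   // struct vdpau_render_state (old hw-accel API)

// get_buffer wraps a VDPAU video surface in a vdpau_render_state and stores
// it in frame->data[0]; libavcodec fills vrs->info and the bitstream buffers
// while parsing; draw_horiz_band is where the accumulated data is handed to
// the VDPAU decoder object.
static void sketch_submit(VdpDecoder vdp_decoder, VdpDecoderRender * render,
    const struct vdpau_render_state *vrs)
{
    VdpStatus status;

    status = render(vdp_decoder, vrs->surface,
        (VdpPictureInfo const *)&vrs->info, vrs->bitstream_buffers_used,
        vrs->bitstream_buffers);
    if (status != VDP_STATUS_OK) {
        // log and drop the frame; the real handling lives in video.c
    }
}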

View File

@ -44,6 +44,8 @@
static char BrokenThreadsAndPlugins;    ///< broken vdr threads and plugins

+static char ConfigVdpauDecoder = 1;    ///< use vdpau decoder, if possible

//////////////////////////////////////////////////////////////////////////////
//  Audio
//////////////////////////////////////////////////////////////////////////////
@ -366,14 +368,16 @@ int VideoDecode(void)
        case CODEC_ID_MPEG2VIDEO:
            if (last_codec_id != CODEC_ID_MPEG2VIDEO) {
                last_codec_id = CODEC_ID_MPEG2VIDEO;
-               CodecVideoOpen(MyVideoDecoder, 0 ? "mpegvideo_vdpau" : NULL,
+               CodecVideoOpen(MyVideoDecoder,
+                   ConfigVdpauDecoder ? "mpegvideo_vdpau" : NULL,
                    CODEC_ID_MPEG2VIDEO);
            }
            break;
        case CODEC_ID_H264:
            if (last_codec_id != CODEC_ID_H264) {
                last_codec_id = CODEC_ID_H264;
-               CodecVideoOpen(MyVideoDecoder, 0 ? "h264video_vdpau" : NULL,
+               CodecVideoOpen(MyVideoDecoder,
+                   ConfigVdpauDecoder ? "h264video_vdpau" : NULL,
                    CODEC_ID_H264);
            }
            break;
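
A hedged sketch of what the name-based selection above implies inside CodecVideoOpen (which is not part of this diff): the "*_vdpau" names pick libavcodec's bitstream-only decoders that emit PIX_FMT_VDPAU_* frames, otherwise the normal software decoder is used. sketch_find_decoder and the exact fallback are assumptions; only avcodec_find_decoder_by_name() and avcodec_find_decoder() are real libavcodec calls.

#include <libavcodec/avcodec.h>

// Assumption: how a name like "h264_vdpau" would be resolved.
static AVCodec *sketch_find_decoder(const char *name, enum CodecID codec_id)
{
    AVCodec *codec;

    if (name && (codec = avcodec_find_decoder_by_name(name))) {
        return codec;                   // e.g. "h264_vdpau", "mpegvideo_vdpau"
    }
    return avcodec_find_decoder(codec_id);      // plain software decoder
}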

View File

@ -39,7 +39,7 @@ extern "C" {
//////////////////////////////////////////////////////////////////////////////

-static const char *const VERSION = "0.1.0";
+static const char *const VERSION = "0.1.1";
static const char *const DESCRIPTION =
    trNOOP("A software and GPU emulated HD device");

video.c
View File

@ -107,6 +107,7 @@
#ifdef USE_VDPAU
#include <vdpau/vdpau_x11.h>
+#include <libavcodec/vdpau.h>
#endif

#include <libavcodec/avcodec.h>
@ -930,6 +931,7 @@ static VASurfaceID VaapiGetSurface(VaapiDecoder * decoder)
    for (i = 0; i < decoder->SurfaceFreeN; ++i) {
        decoder->SurfacesFree[i] = decoder->SurfacesFree[i + 1];
    }
+   decoder->SurfacesFree[i] = VA_INVALID_ID;

    // save as used
    decoder->SurfacesUsed[decoder->SurfaceUsedN++] = surface;
@ -1044,6 +1046,11 @@ static VaapiDecoder *VaapiNewDecoder(void)
    decoder->Image->image_id = VA_INVALID_ID;

+   for (i = 0; i < CODEC_SURFACES_MAX; ++i) {
+       decoder->SurfacesUsed[i] = VA_INVALID_ID;
+       decoder->SurfacesFree[i] = VA_INVALID_ID;
+   }

    // setup video surface ring buffer
    atomic_set(&decoder->SurfacesFilled, 0);
@ -1314,11 +1321,11 @@ static void VideoVaapiExit(void)
    }
}

-/**
-**  Update output for new size or aspect ratio.
-**
-**  @param decoder  VA-API decoder
-*/
+///
+/// Update output for new size or aspect ratio.
+///
+/// @param decoder  VA-API decoder
+///
static void VaapiUpdateOutput(VaapiDecoder * decoder)
{
    AVRational input_aspect_ratio;
@ -1336,7 +1343,7 @@ static void VaapiUpdateOutput(VaapiDecoder * decoder)
        decoder->InputWidth * input_aspect_ratio.num,
        decoder->InputHeight * input_aspect_ratio.den, 1024 * 1024);

-   Debug(3, "video: aspect %d : %d\n", display_aspect_ratio.num,
+   Debug(3, "video: aspect %d:%d\n", display_aspect_ratio.num,
        display_aspect_ratio.den);

    // FIXME: store different positions for the ratios
@ -1570,20 +1577,20 @@ static enum PixelFormat Vaapi_get_format(VaapiDecoder * decoder,
        if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
                decoder->GlTexture[0], &decoder->GlxSurface[0])
            != VA_STATUS_SUCCESS) {
-           Fatal(_("video: can't create glx surfaces"));
+           Fatal(_("video/glx: can't create glx surfaces"));
        }
        // FIXME: this isn't usable with vdpau-backend
        /*
           if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
           decoder->GlTexture[1], &decoder->GlxSurface[1])
           != VA_STATUS_SUCCESS) {
-          Fatal(_("video: can't create glx surfaces"));
+          Fatal(_("video/glx: can't create glx surfaces"));
           }
         */
    }
#endif
-   Debug(3, "\tpixel format %#010x\n", *fmt_idx);
+   Debug(3, "\t%#010x %s\n", fmt_idx[0], av_get_pix_fmt_name(fmt_idx[0]));

    return *fmt_idx;

  slow_path:
@ -1668,7 +1675,7 @@ static void VaapiPutSurfaceX11(VaapiDecoder * decoder, VASurfaceID surface,
        if (vaQuerySurfaceStatus(VaDisplay, surface,
                &status) != VA_STATUS_SUCCESS) {
-           Error(_("video: vaQuerySurface failed\n"));
+           Error(_("video/vaapi: vaQuerySurface failed\n"));
        }
        Debug(3, "video/vaapi: %2d %d\n", i, status);
        usleep(1 * 1000);
@ -1755,7 +1762,7 @@ static void VaapiPutSurfaceGLX(VaapiDecoder * decoder, VASurfaceID surface,
    start = GetMsTicks();

    if (vaCopySurfaceGLX(decoder->VaDisplay, decoder->GlxSurface[0], surface,
            type | decoder->SurfaceFlags) != VA_STATUS_SUCCESS) {
-       Error(_("video: vaCopySurfaceGLX failed\n"));
+       Error(_("video/glx: vaCopySurfaceGLX failed\n"));
        return;
    }
    copy = GetMsTicks();
@ -1836,13 +1843,13 @@ static int VaapiFindImageFormat(VaapiDecoder * decoder,
    return 0;
}

-/**
-**  Configure VA-API for new video format.
-**
-**  @param decoder  VA-API decoder
-**
-**  @note called only for software decoder.
-*/
+///
+/// Configure VA-API for new video format.
+///
+/// @param decoder  VA-API decoder
+///
+/// @note called only for software decoder.
+///
static void VaapiSetup(VaapiDecoder * decoder,
    const AVCodecContext * video_ctx)
{
@ -1860,14 +1867,14 @@ static void VaapiSetup(VaapiDecoder * decoder,
    if (decoder->Image->image_id != VA_INVALID_ID) {
        if (vaDestroyImage(VaDisplay, decoder->Image->image_id)
            != VA_STATUS_SUCCESS) {
-           Error("video: can't destroy image!\n");
+           Error("video/vaapi: can't destroy image!\n");
        }
    }
    VaapiFindImageFormat(decoder, video_ctx->pix_fmt, format);
    if (vaCreateImage(VaDisplay, format, width, height,
            decoder->Image) != VA_STATUS_SUCCESS) {
-       Fatal("video: can't create image!\n");
+       Fatal("video/vaapi: can't create image!\n");
    }
    Debug(3,
        "video/vaapi: created image %dx%d with id 0x%08x and buffer id 0x%08x\n",
@ -1884,13 +1891,13 @@ static void VaapiSetup(VaapiDecoder * decoder,
        if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
                decoder->GlTexture[0], &decoder->GlxSurface[0])
            != VA_STATUS_SUCCESS) {
-           Fatal(_("video: can't create glx surfaces"));
+           Fatal(_("video/glx: can't create glx surfaces"));
        }
        /*
           if (vaCreateSurfaceGLX(decoder->VaDisplay, GL_TEXTURE_2D,
           decoder->GlTexture[1], &decoder->GlxSurface[1])
           != VA_STATUS_SUCCESS) {
-          Fatal(_("video: can't create glx surfaces"));
+          Fatal(_("video/glx: can't create glx surfaces"));
           }
         */
    }
@ -3009,6 +3016,7 @@ typedef struct _vdpau_decoder_
    int OutputHeight;                   ///< output window height
    enum PixelFormat PixFmt;            ///< ffmpeg frame pixfmt
+   int WrongInterlacedWarned;          ///< warning about interlace flag issued
    int Interlaced;                     ///< ffmpeg interlaced flag
    int TopFieldFirst;                  ///< ffmpeg top field displayed first
@ -3023,6 +3031,7 @@ typedef struct _vdpau_decoder_
    void *GlxSurface[2];                ///< VDPAU/GLX surface
#endif

+   VdpDecoder VideoDecoder;            ///< vdp video decoder
    VdpVideoMixer VideoMixer;           ///< vdp video mixer
    VdpChromaType ChromaType;           ///< vdp video surface chroma format
@ -3128,6 +3137,8 @@ static VdpDecoderQueryCapabilities *VdpauDecoderQueryCapabilities;
static VdpDecoderCreate *VdpauDecoderCreate;
static VdpDecoderDestroy *VdpauDecoderDestroy;
+static VdpDecoderRender *VdpauDecoderRender;

static VdpVideoMixerQueryFeatureSupport *VdpauVideoMixerQueryFeatureSupport;
static VdpVideoMixerCreate *VdpauVideoMixerCreate;
static VdpVideoMixerSetFeatureEnables *VdpauVideoMixerSetFeatureEnables;
@ -3163,7 +3174,8 @@ static void VdpauCreateSurfaces(VdpauDecoder * decoder, int width, int height)
{
    int i;

-   Debug(3, "video/vdpau: %s %dx%d\n", __FUNCTION__, width, height);
+   Debug(3, "video/vdpau: %s: %dx%d * %d\n", __FUNCTION__, width, height,
+       CODEC_SURFACES_DEFAULT);

    // FIXME: allocate only the number of needed surfaces
    decoder->SurfaceFreeN = CODEC_SURFACES_DEFAULT;
@ -3178,7 +3190,7 @@ static void VdpauCreateSurfaces(VdpauDecoder * decoder, int width, int height)
                VdpauGetErrorString(status));
            // FIXME: no fatal
        }
-       Debug(4, "video/vdpau: created video surface %dx%d with id 0x%08x\n",
+       Debug(3, "video/vdpau: created video surface %dx%d with id 0x%08x\n",
            width, height, decoder->SurfacesFree[i]);
    }
}
@ -3196,6 +3208,11 @@ static void VdpauDestroySurfaces(VdpauDecoder * decoder)
Debug(3, "video/vdpau: %s\n", __FUNCTION__); Debug(3, "video/vdpau: %s\n", __FUNCTION__);
for (i = 0; i < decoder->SurfaceFreeN; ++i) { for (i = 0; i < decoder->SurfaceFreeN; ++i) {
#ifdef DEBUG
if (decoder->SurfacesFree[i] == VDP_INVALID_HANDLE) {
Debug(3, "video/vdpau: invalid surface\n");
}
#endif
status = VdpauVideoSurfaceDestroy(decoder->SurfacesFree[i]); status = VdpauVideoSurfaceDestroy(decoder->SurfacesFree[i]);
if (status != VDP_STATUS_OK) { if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: can't destroy video surface: %s\n"), Error(_("video/vdpau: can't destroy video surface: %s\n"),
@ -3203,6 +3220,11 @@ static void VdpauDestroySurfaces(VdpauDecoder * decoder)
        }
    }
    for (i = 0; i < decoder->SurfaceUsedN; ++i) {
+#ifdef DEBUG
+       if (decoder->SurfacesUsed[i] == VDP_INVALID_HANDLE) {
+           Debug(3, "video/vdpau: invalid surface\n");
+       }
+#endif
        status = VdpauVideoSurfaceDestroy(decoder->SurfacesUsed[i]);
        if (status != VDP_STATUS_OK) {
            Error(_("video/vdpau: can't destroy video surface: %s\n"),
@ -3237,6 +3259,7 @@ static unsigned VdpauGetSurface(VdpauDecoder * decoder)
    for (i = 0; i < decoder->SurfaceFreeN; ++i) {
        decoder->SurfacesFree[i] = decoder->SurfacesFree[i + 1];
    }
+   decoder->SurfacesFree[i] = VDP_INVALID_HANDLE;

    // save as used
    decoder->SurfacesUsed[decoder->SurfaceUsedN++] = surface;
@ -3320,6 +3343,7 @@ static void VdpauMixerSetup(VdpauDecoder * decoder)
    }
    decoder->ChromaType = chroma_type = VDP_CHROMA_TYPE_420;
+   // FIXME: use best chroma

    //
    //  Setup parameter/value tables
@ -3413,8 +3437,14 @@ static VdpauDecoder *VdpauNewDecoder(void)
    decoder->Device = VdpauDevice;
    decoder->Window = VideoWindow;

+   decoder->VideoDecoder = VDP_INVALID_HANDLE;
    decoder->VideoMixer = VDP_INVALID_HANDLE;

+   for (i = 0; i < CODEC_SURFACES_MAX; ++i) {
+       decoder->SurfacesUsed[i] = VDP_INVALID_HANDLE;
+       decoder->SurfacesFree[i] = VDP_INVALID_HANDLE;
+   }

    //
    // setup video surface ring buffer
    //
@ -3650,9 +3680,9 @@ static void VideoVdpauInit(const char *display_name)
#if 0
    VdpauGetProc(VDP_FUNC_ID_DECODER_GET_PARAMETERS,
        &VdpauDecoderGetParameters, "DecoderGetParameters");
+#endif
    VdpauGetProc(VDP_FUNC_ID_DECODER_RENDER, &VdpauDecoderRender,
        "DecoderRender");
-#endif
    VdpauGetProc(VDP_FUNC_ID_VIDEO_MIXER_QUERY_FEATURE_SUPPORT,
        &VdpauVideoMixerQueryFeatureSupport, "VideoMixerQueryFeatureSupport");
#if 0
@ -3909,9 +3939,54 @@ static void VideoVdpauExit(void)
    }
}
///
/// Update output for new size or aspect ratio.
///
/// @param decoder VDPAU hw decoder
///
static void VdpauUpdateOutput(VdpauDecoder * decoder)
{
AVRational input_aspect_ratio;
AVRational display_aspect_ratio;
input_aspect_ratio = decoder->InputAspect;
if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
input_aspect_ratio.num = 1;
input_aspect_ratio.den = 1;
Debug(3, "video: aspect defaults to %d:%d\n", input_aspect_ratio.num,
input_aspect_ratio.den);
}
av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
decoder->InputWidth * input_aspect_ratio.num,
decoder->InputHeight * input_aspect_ratio.den, 1024 * 1024);
Debug(3, "video: aspect %d:%d\n", display_aspect_ratio.num,
display_aspect_ratio.den);
// FIXME: store different positions for the ratios
decoder->OutputX = 0;
decoder->OutputY = 0;
decoder->OutputWidth = (VideoWindowHeight * display_aspect_ratio.num)
/ display_aspect_ratio.den;
decoder->OutputHeight = (VideoWindowWidth * display_aspect_ratio.num)
/ display_aspect_ratio.den;
if ((unsigned)decoder->OutputWidth > VideoWindowWidth) {
decoder->OutputWidth = VideoWindowWidth;
decoder->OutputY = (VideoWindowHeight - decoder->OutputHeight) / 2;
} else {
decoder->OutputHeight = VideoWindowHeight;
decoder->OutputX = (VideoWindowWidth - decoder->OutputWidth) / 2;
}
}
///
/// Check profile supported.
///
+/// @param decoder  VDPAU hw decoder
+/// @param profile  VDPAU profile requested
+///
static VdpDecoderProfile VdpauCheckProfile(VdpauDecoder * decoder,
    VdpDecoderProfile profile)
{
@ -3926,7 +4001,7 @@ static VdpDecoderProfile VdpauCheckProfile(VdpauDecoder * decoder,
    VdpauDecoderQueryCapabilities(decoder->Device, profile, &is_supported,
        &max_level, &max_macroblocks, &max_width, &max_height);
    if (status != VDP_STATUS_OK) {
-       Error(_("video/vdpau: can't queey decoder capabilities: %s\n"),
+       Error(_("video/vdpau: can't query decoder capabilities: %s\n"),
            VdpauGetErrorString(status));
        return VDP_INVALID_HANDLE;
    }
@ -3947,35 +4022,49 @@ static VdpDecoderProfile VdpauCheckProfile(VdpauDecoder * decoder,
static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
    AVCodecContext * video_ctx, const enum PixelFormat *fmt)
{
+   const enum PixelFormat *fmt_idx;
    VdpDecoderProfile profile;
-   int i;
+   VdpStatus status;
+   int max_refs;

+   Debug(3, "%s: %18p\n", __FUNCTION__, decoder);
    Debug(3, "video: new stream format %d\n", GetMsTicks() - VideoSwitch);

    VdpauCleanup(decoder);

-   if (getenv("NO_HW")) {
+   if (getenv("NO_HW")) {              // FIXME: make config option
+       Debug(3, "codec: hardware acceleration disabled\n");
        goto slow_path;
    }
-#ifdef DEBUG
-#ifndef FF_API_GET_PIX_FMT_NAME
+   //
+   //  look through formats
+   //
    Debug(3, "%s: codec %d fmts:\n", __FUNCTION__, video_ctx->codec_id);
-   for (i = 0; fmt[i] != PIX_FMT_NONE; ++i) {
-       Debug(3, "\t%#010x %s\n", fmt[i], avcodec_get_pix_fmt_name(fmt[i]));
+   for (fmt_idx = fmt; *fmt_idx != PIX_FMT_NONE; fmt_idx++) {
+       Debug(3, "\t%#010x %s\n", *fmt_idx, av_get_pix_fmt_name(*fmt_idx));
+       // check supported pixel format with entry point
+       switch (*fmt_idx) {
+           case PIX_FMT_VDPAU_H264:
+           case PIX_FMT_VDPAU_MPEG1:
+           case PIX_FMT_VDPAU_MPEG2:
+           case PIX_FMT_VDPAU_WMV3:
+           case PIX_FMT_VDPAU_VC1:
+           case PIX_FMT_VDPAU_MPEG4:
+               break;
+           default:
+               continue;
+       }
+       break;
    }
-   Debug(3, "\n");
-#else
-   Debug(3, "%s: codec %d fmts:\n", __FUNCTION__, video_ctx->codec_id);
-   for (i = 0; fmt[i] != PIX_FMT_NONE; ++i) {
-       Debug(3, "\t%#010x %s\n", fmt[i], av_get_pix_fmt_name(fmt[i]));
-   }
-   Debug(3, "\n");
-#endif
-#endif
+   if (*fmt_idx == PIX_FMT_NONE) {
+       Error(_("video/vdpau: no valid vdpau pixfmt found\n"));
+       goto slow_path;
+   }
+
+   max_refs = CODEC_SURFACES_DEFAULT;
    // check profile
    switch (video_ctx->codec_id) {
        case CODEC_ID_MPEG2VIDEO:
+           max_refs = 2;
            profile =
                VdpauCheckProfile(decoder, VDP_DECODER_PROFILE_MPEG2_MAIN);
            break;
@ -3987,22 +4076,35 @@ static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
             */
            break;
        case CODEC_ID_H264:
-           /*
-              // try more simple formats, fallback to better
-              if (video_ctx->profile == FF_PROFILE_H264_BASELINE) {
-              p = VaapiFindProfile(profiles, profile_n,
-              VAProfileH264Baseline);
-              if (p == -1) {
-              p = VaapiFindProfile(profiles, profile_n,
-              VAProfileH264Main);
-              }
-              } else if (video_ctx->profile == FF_PROFILE_H264_MAIN) {
-              p = VaapiFindProfile(profiles, profile_n, VAProfileH264Main);
-              }
-              if (p == -1) {
-              p = VaapiFindProfile(profiles, profile_n, VAProfileH264High);
-              }
-            */
+           // FIXME: can calculate level 4.1 limits
+           max_refs = 16;
+           // try more simple formats, fallback to better
+           if (video_ctx->profile == FF_PROFILE_H264_BASELINE) {
+               profile =
+                   VdpauCheckProfile(decoder,
+                   VDP_DECODER_PROFILE_H264_BASELINE);
+               if (profile == VDP_INVALID_HANDLE) {
+                   profile =
+                       VdpauCheckProfile(decoder,
+                       VDP_DECODER_PROFILE_H264_MAIN);
+               }
+               if (profile == VDP_INVALID_HANDLE) {
+                   profile =
+                       VdpauCheckProfile(decoder,
+                       VDP_DECODER_PROFILE_H264_HIGH);
+               }
+           } else if (video_ctx->profile == FF_PROFILE_H264_MAIN) {
+               profile =
+                   VdpauCheckProfile(decoder, VDP_DECODER_PROFILE_H264_MAIN);
+               if (profile == VDP_INVALID_HANDLE) {
+                   profile =
+                       VdpauCheckProfile(decoder,
+                       VDP_DECODER_PROFILE_H264_HIGH);
+               }
+           } else {
+               profile =
+                   VdpauCheckProfile(decoder, VDP_DECODER_PROFILE_H264_MAIN);
+           }
            break;
        case CODEC_ID_WMV3:
            /*
@ -4017,46 +4119,22 @@ static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
        default:
            goto slow_path;
    }
-#if 0
-   //
-   //  prepare decoder
-   //
-   memset(&attrib, 0, sizeof(attrib));
-   attrib.type = VAConfigAttribRTFormat;
-   if (vaGetConfigAttributes(decoder->VaDisplay, p, e, &attrib, 1)) {
-       Error("codec: can't get attributes");
-       goto slow_path;
-   }
-   if (attrib.value & VA_RT_FORMAT_YUV420) {
-       Info(_("codec: YUV 420 supported\n"));
-   }
-   if (attrib.value & VA_RT_FORMAT_YUV422) {
-       Info(_("codec: YUV 422 supported\n"));
-   }
-   if (attrib.value & VA_RT_FORMAT_YUV444) {
-       Info(_("codec: YUV 444 supported\n"));
-   }
-   // only YUV420 supported
-   if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
-       Warning("codec: YUV 420 not supported");
-       goto slow_path;
-   }
-   // create a configuration for the decode pipeline
-   if (vaCreateConfig(decoder->VaDisplay, p, e, &attrib, 1,
-           &decoder->VaapiContext->config_id)) {
-       Error("codec: can't create config");
-       goto slow_path;
-   }
-   VaapiCreateSurfaces(decoder, video_ctx->width, video_ctx->height);
-   // bind surfaces to context
-   if (vaCreateContext(decoder->VaDisplay, decoder->VaapiContext->config_id,
-           video_ctx->width, video_ctx->height, VA_PROGRESSIVE,
-           decoder->SurfacesFree, decoder->SurfaceFreeN,
-           &decoder->VaapiContext->context_id)) {
-       Error("codec: can't create context");
-       // FIXME: must cleanup
-       goto slow_path;
-   }
+   if (profile == VDP_INVALID_HANDLE) {
+       Error(_("video/vdpau: no valid profile found\n"));
+       goto slow_path;
+   }
+   Debug(3, "video/vdpau: create decoder profile=%d %dx%d #%d refs\n",
+       profile, video_ctx->width, video_ctx->height, max_refs);
+   status =
+       VdpauDecoderCreate(VdpauDevice, profile, video_ctx->width,
+       video_ctx->height, max_refs, &decoder->VideoDecoder);
+   if (status != VDP_STATUS_OK) {
+       Error(_("video/vdpau: can't create decoder: %s\n"),
+           VdpauGetErrorString(status));
+       abort();
+       goto slow_path;
+   }
@ -4064,11 +4142,17 @@ static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
    decoder->InputY = 0;
    decoder->InputWidth = video_ctx->width;
    decoder->InputHeight = video_ctx->height;
+   decoder->InputAspect = video_ctx->sample_aspect_ratio;
+   VdpauUpdateOutput(decoder);
+
+   VdpauMixerSetup(decoder);
+
+   // FIXME: need only to create and destroy surfaces for size changes!
+   VdpauCreateSurfaces(decoder, video_ctx->width, video_ctx->height);

-   Debug(3, "\tpixel format %#010x\n", *fmt_idx);
+   Debug(3, "\t%#010x %s\n", fmt_idx[0], av_get_pix_fmt_name(fmt_idx[0]));

    return *fmt_idx;
-#endif
-
-   return *fmt;

  slow_path:
    // no accelerated format found
@ -4082,32 +4166,18 @@ static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
/// @param decoder      VDPAU hw decoder
/// @param video_ctx    ffmpeg video codec context
///
+/// @todo FIXME: use VdpauCleanup
+///
static void VdpauSetup(VdpauDecoder * decoder,
    const AVCodecContext * video_ctx)
{
    VdpStatus status;
    VdpChromaType chroma_type;
-   int i;
    uint32_t width;
    uint32_t height;

    // decoder->Input... already setup by caller
+   VdpauCleanup(decoder);

-   if (decoder->VideoMixer != VDP_INVALID_HANDLE) {
-       status = VdpauVideoMixerDestroy(decoder->VideoMixer);
-       if (status != VDP_STATUS_OK) {
-           Error(_("video/vdpau: can't destroy video mixer: %s\n"),
-               VdpauGetErrorString(status));
-       }
-       decoder->VideoMixer = VDP_INVALID_HANDLE;
-   }
    VdpauMixerSetup(decoder);

-   if (decoder->SurfaceFreeN || decoder->SurfaceUsedN) {
-       VdpauDestroySurfaces(decoder);
-   }
    VdpauCreateSurfaces(decoder, video_ctx->width, video_ctx->height);

    // get real surface size
@ -4115,7 +4185,7 @@ static void VdpauSetup(VdpauDecoder * decoder,
    VdpauVideoSurfaceGetParameters(decoder->SurfacesFree[0], &chroma_type,
        &width, &height);
    if (status != VDP_STATUS_OK) {
-       Fatal(_("video/vdpau: can't get video surface parameters: %s\n"),
+       Error(_("video/vdpau: can't get video surface parameters: %s\n"),
            VdpauGetErrorString(status));
    }
    // vdpau can choose different sizes, must use them for putbits
@ -4126,22 +4196,67 @@ static void VdpauSetup(VdpauDecoder * decoder,
Fatal(_("video/vdpau: video surface type/size mismatch\n")); Fatal(_("video/vdpau: video surface type/size mismatch\n"));
} }
// //
// reset video surface ring buffer
//
atomic_set(&decoder->SurfacesFilled, 0);
for (i = 0; i < VIDEO_SURFACES_MAX; ++i) {
decoder->SurfacesRb[i] = VDP_INVALID_HANDLE;
}
decoder->SurfaceRead = 0;
decoder->SurfaceWrite = 0;
decoder->SurfaceField = 0;
//
// When window output size changes update VdpauSurfacesRb // When window output size changes update VdpauSurfacesRb
// //
} }
///
/// Queue output surface.
///
/// @param decoder VDPAU hw decoder
/// @param surface output surface
/// @param softdec software decoder
///
/// @note we can't mix software and hardware decoder surfaces
///
static void VdpauQueueSurface(VdpauDecoder * decoder, VdpVideoSurface surface,
int softdec)
{
VdpVideoSurface old;
++decoder->FrameCounter;
if (1) { // can't wait for output queue empty
if (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
Warning(_
("video/vdpau: output buffer full, dropping frame (%d/%d)\n"),
++decoder->FramesDropped, decoder->FrameCounter);
VdpauPrintFrames(decoder);
// software surfaces only
if (softdec) {
VdpauReleaseSurface(decoder, surface);
}
return;
}
#if 0
} else { // wait for output queue empty
while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
VideoDisplayHandler();
}
#endif
}
//
// Check and release, old surface
//
if ((old = decoder->SurfacesRb[decoder->SurfaceWrite])
!= VDP_INVALID_HANDLE) {
// now we can release the surface, software surfaces only
if (softdec) {
VdpauReleaseSurface(decoder, old);
}
}
Debug(4, "video/vdpau: yy video surface %#x@%d ready\n", surface,
decoder->SurfaceWrite);
decoder->SurfacesRb[decoder->SurfaceWrite] = surface;
decoder->SurfaceWrite = (decoder->SurfaceWrite + 1)
% VIDEO_SURFACES_MAX;
atomic_inc(&decoder->SurfacesFilled);
}
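
A minimal sketch of the consumer side of this ring buffer; the display path is only partly visible in this diff, so the function name and the immediate advance of SurfaceRead are assumptions. The real code keeps the surface referenced until it has been mixed and displayed.

// Assumption: simplified dequeue as the display side would use it.
static VdpVideoSurface sketch_dequeue(VdpauDecoder * decoder)
{
    VdpVideoSurface surface;

    if (!atomic_read(&decoder->SurfacesFilled)) {
        return VDP_INVALID_HANDLE;      // nothing decoded yet
    }
    surface = decoder->SurfacesRb[decoder->SurfaceRead];
    decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX;
    atomic_dec(&decoder->SurfacesFilled);
    return surface;
}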
///
/// Render a ffmpeg frame.
///
@ -4154,25 +4269,78 @@ static void VdpauRenderFrame(VdpauDecoder * decoder,
{
    VdpStatus status;
    VdpVideoSurface surface;
-   VdpVideoSurface old;

    //
    //  Hardware render
    //
-   if (video_ctx->hwaccel_context) {
-       surface = (size_t) frame->data[3];
-
-       Debug(2, "video/vdpau: display surface %#x\n", surface);
-
-       // FIXME: should be done by init
-       if (decoder->Interlaced != frame->interlaced_frame
-           || decoder->TopFieldFirst != frame->top_field_first) {
-           Debug(3, "video/vdpau: interlaced %d top-field-first %d\n",
-               frame->interlaced_frame, frame->top_field_first);
-           decoder->Interlaced = frame->interlaced_frame;
-           decoder->TopFieldFirst = frame->top_field_first;
-           decoder->SurfaceField = 0;
-       }
+   // VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4
+   if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
+           && video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
+       || video_ctx->pix_fmt == PIX_FMT_VDPAU_MPEG4) {
+       struct vdpau_render_state *vrs;
+       int interlaced;
+
+       vrs = (struct vdpau_render_state *)frame->data[0];
+       surface = vrs->surface;
+       Debug(4, "video/vdpau: hw render hw surface %#x\n", surface);
+
+       // FIXME: some tv-stations toggle interlace on/off
+       // frame->interlaced_frame isn't always correct set
+       interlaced = frame->interlaced_frame;
+       if (video_ctx->height == 720) {
+           if (interlaced && !decoder->WrongInterlacedWarned) {
+               Debug(3, "video/vdpau: wrong interlace flag fixed\n");
+               decoder->WrongInterlacedWarned = 1;
+           }
+           interlaced = 0;
+       } else {
+           if (!interlaced && !decoder->WrongInterlacedWarned) {
+               Debug(3, "video/vdpau: wrong interlace flag fixed\n");
+               decoder->WrongInterlacedWarned = 1;
+           }
+           interlaced = 1;
+       }
+       // update aspect ratio changes
+#ifdef still_to_detect_define
+       if (av_cmp_q(decoder->InputAspect, frame->sample_aspect_ratio)) {
+           Debug(3, "video/vdpau: aspect ratio changed\n");
+
+           //decoder->InputWidth = video_ctx->width;
+           //decoder->InputHeight = video_ctx->height;
+           decoder->InputAspect = frame->sample_aspect_ratio;
+           VdpauUpdateOutput(decoder);
+       }
+#else
+       if (av_cmp_q(decoder->InputAspect, video_ctx->sample_aspect_ratio)) {
+           Debug(3, "video/vdpau: aspect ratio changed\n");
+
+           //decoder->InputWidth = video_ctx->width;
+           //decoder->InputHeight = video_ctx->height;
+           decoder->InputAspect = video_ctx->sample_aspect_ratio;
+           VdpauUpdateOutput(decoder);
+       }
+#endif
+
+       if (VideoDeinterlace == VideoDeinterlaceSoftware && interlaced) {
+           // FIXME: software deinterlace avpicture_deinterlace
+           // FIXME: VdpauCpuDeinterlace(decoder, surface);
+       } else {
+           // FIXME: should be done by init
+           if (decoder->Interlaced != interlaced
+               || decoder->TopFieldFirst != frame->top_field_first) {
+               Debug(3, "video/vdpau: interlaced %d top-field-first %d\n",
+                   interlaced, frame->top_field_first);
+               decoder->Interlaced = interlaced;
+               decoder->TopFieldFirst = frame->top_field_first;
+               decoder->SurfaceField = 1;
+           }
+           VdpauQueueSurface(decoder, surface, 0);
+       }

    //
    //  PutBitsYCbCr render
    //
@ -4234,55 +4402,13 @@ static void VdpauRenderFrame(VdpauDecoder * decoder,
Error(_("video/vdpau: can't put video surface bits: %s\n"), Error(_("video/vdpau: can't put video surface bits: %s\n"),
VdpauGetErrorString(status)); VdpauGetErrorString(status));
} }
VdpauQueueSurface(decoder, surface, 1);
} }
if (frame->interlaced_frame) { if (frame->interlaced_frame) {
++decoder->FrameCounter; ++decoder->FrameCounter;
} }
++decoder->FrameCounter;
// place in output queue
// I place it here, for later thread support
if (1) { // can't wait for output queue empty
if (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
Warning(_
("video/vdpau: output buffer full, dropping frame (%d/%d)\n"),
++decoder->FramesDropped, decoder->FrameCounter);
VdpauPrintFrames(decoder);
// software surfaces only
if (!video_ctx->hwaccel_context) {
VdpauReleaseSurface(decoder, surface);
}
return;
}
#if 0
} else { // wait for output queue empty
while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
VideoDisplayHandler();
}
#endif
}
//
// Check and release, old surface
//
if ((old = decoder->SurfacesRb[decoder->SurfaceWrite])
!= VDP_INVALID_HANDLE) {
// now we can release the surface, software surfaces only
if (!video_ctx->hwaccel_context) {
VdpauReleaseSurface(decoder, old);
}
}
Debug(4, "video: yy video surface %#x@%d ready\n", surface,
decoder->SurfaceWrite);
decoder->SurfacesRb[decoder->SurfaceWrite] = surface;
decoder->SurfaceWrite = (decoder->SurfaceWrite + 1)
% VIDEO_SURFACES_MAX;
atomic_inc(&decoder->SurfacesFilled);
} }
/// ///
@ -4394,7 +4520,6 @@ static void VdpauMixVideo(VdpauDecoder * decoder)
#ifdef DEBUG
    if (atomic_read(&decoder->SurfacesFilled) < 3) {
        Debug(3, "only %d\n", atomic_read(&decoder->SurfacesFilled));
-       abort();
    }
#endif
@ -4448,7 +4573,7 @@ static void VdpauMixVideo(VdpauDecoder * decoder)
            VdpauGetErrorString(status));
    }

-   Debug(4, "video: yy video surface %#x@%d displayed\n", current,
+   Debug(4, "video/vdpau: yy video surface %#x@%d displayed\n", current,
        decoder->SurfaceRead);
}
@ -5237,7 +5362,7 @@ static void VideoEvent(void)
Debug(3, "video/event: ClientMessage\n"); Debug(3, "video/event: ClientMessage\n");
if (event.xclient.data.l[0] == (long)WmDeleteWindowAtom) { if (event.xclient.data.l[0] == (long)WmDeleteWindowAtom) {
// FIXME: wrong, kills recordings ... // FIXME: wrong, kills recordings ...
Error(_("video: FIXME: wm-delete-message\n")); Error(_("video/event: FIXME: wm-delete-message\n"));
} }
break; break;
@ -5264,7 +5389,8 @@ static void VideoEvent(void)
            }
#endif
            if (keysym == NoSymbol) {
-               Warning(_("video: No symbol for %d\n"), event.xkey.keycode);
+               Warning(_("video/event: No symbol for %d\n"),
+                   event.xkey.keycode);
            }
            FeedKeyPress("XKeySym", XKeysymToString(keysym), 0, 0);
            /*
@ -5637,6 +5763,37 @@ struct vaapi_context *VideoGetVaapiContext(VideoHwDecoder * decoder)
    return NULL;
}
///
/// Draw ffmpeg vdpau render state.
///
/// @param decoder VDPAU decoder
/// @param vrs vdpau render state
///
void VideoDrawRenderState(VideoHwDecoder * decoder,
struct vdpau_render_state *vrs)
{
#ifdef USE_VDPAU
if (VideoVdpauEnabled) {
VdpStatus status;
Debug(4, "video/vdpau: decoder render to %#010x\n", vrs->surface);
status =
VdpauDecoderRender(decoder->Vdpau.VideoDecoder, vrs->surface,
(VdpPictureInfo const *)&vrs->info, vrs->bitstream_buffers_used,
vrs->bitstream_buffers);
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: decoder rendering failed: %s\n"),
VdpauGetErrorString(status));
}
return;
}
#endif
(void)decoder;
(void)vrs;
Error(_("video/vdpau: draw render state, without vdpau enabled\n"));
return;
}
#ifndef USE_VIDEO_THREAD

/**

View File

@ -60,6 +60,12 @@ extern struct vaapi_context *VideoGetVaapiContext(VideoHwDecoder *);
/// Callback to negotiate the PixelFormat.
extern enum PixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *,
    const enum PixelFormat *);
+
+#ifdef AVCODEC_VDPAU_H
+/// Draw vdpau render state
+extern void VideoDrawRenderState(VideoHwDecoder *,
+    struct vdpau_render_state *);
+#endif
#endif

/// Display video TEST