8 Commits
0.1.2 ... 0.1.3

Author  SHA1  Message  Date
Johns  23300b0383  Add missing VdpauDecoderDestroy.  2011-12-29 00:55:57 +01:00
Johns  baf10db48e  Cleanup video packet ringbuffer.  2011-12-26 14:27:12 +01:00
Johns  7fbfe0396e  Allow build without VDPAU.  2011-12-26 14:25:42 +01:00
Johns  a5c28b9fe2  Fix bug: swapped end and start.  2011-12-25 17:23:29 +01:00
Johns  7f0ad63209  Support other than "PCM" alsa mixer channels.  2011-12-25 15:36:04 +01:00
Johns  10ab0274ab  Show time used for VdpDecoderRender.  2011-12-25 11:50:17 +01:00
Johns  83413c1adf  Use only one thread for hw decoding.  2011-12-25 11:36:02 +01:00
Johns  63d18ea488  Fix bug: wrong aspect video size calculation.  2011-12-25 11:35:18 +01:00
7 changed files with 210 additions and 37 deletions


@@ -57,6 +57,18 @@ Install:
cd vdr-softhddevice
make VDRDIR=<path-to-your-vdr-files> LIBDIR=.
Setup: environment
Following is supported:
DISPLAY=:0.0
x11 display name
ALSA_DEVICE=default
alsa PCM device name
ALSA_MIXER=default
alsa control device name
ALSA_MIXER_CHANNEL=PCM
alsa control channel name
Setup: /etc/vdr/setup.conf
Following is supported:
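
A hedged C sketch of how such environment overrides are typically resolved at startup, getenv() with a built-in fallback; the helper name is illustrative, not the plugin's actual code:

#include <stdlib.h>

// return the environment override if set and non-empty, else the default
static const char *ConfigString(const char *env_name, const char *def)
{
    const char *value;

    if ((value = getenv(env_name)) && *value) {
        return value;
    }
    return def;
}

// e.g. ConfigString("ALSA_MIXER_CHANNEL", "PCM") or ConfigString("DISPLAY", ":0.0")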

Todo

@@ -10,7 +10,8 @@ missing:
vdpau:
1080i with temporal spatial too slow GT 520
Dr. Dish H264 black picture
VdpPreemptionCallback handling
Loose a surface
libva-intel-driver:
intel still has hangups most with 1080i

audio.c

@@ -60,6 +60,7 @@
static const char *AudioPCMDevice; ///< alsa PCM device name
static const char *AudioMixerDevice; ///< alsa mixer device name
static const char *AudioMixerChannel; ///< alsa mixer channel name
static volatile char AudioRunning; ///< thread running / stopped
static int AudioPaused; ///< audio paused
static unsigned AudioSampleRate; ///< audio sample rate in hz
@@ -589,6 +590,7 @@ void AudioSetVolume(int volume)
static void AlsaInitMixer(void)
{
const char *device;
const char *channel;
snd_mixer_t *alsa_mixer;
snd_mixer_elem_t *alsa_mixer_elem;
long alsa_mixer_elem_min;
@@ -599,13 +601,18 @@ static void AlsaInitMixer(void)
device = "default";
}
}
Debug(3, "audio/alsa: mixer open\n");
if (!(channel = AudioMixerChannel)) {
if (!(channel = getenv("ALSA_MIXER_CHANNEL"))) {
channel = "PCM";
}
}
Debug(3, "audio/alsa: mixer %s - %s open\n", device, channel);
snd_mixer_open(&alsa_mixer, 0);
if (alsa_mixer && snd_mixer_attach(alsa_mixer, device) >= 0
&& snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0
&& snd_mixer_load(alsa_mixer) >= 0) {
const char *const alsa_mixer_elem_name = "PCM";
const char *const alsa_mixer_elem_name = channel;
alsa_mixer_elem = snd_mixer_first_elem(alsa_mixer);
while (alsa_mixer_elem) {
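
A condensed sketch of the mixer setup shown in this hunk: attach to the named ALSA mixer device, then walk the simple elements until one matches the requested channel name ("PCM", "Master", ...). Error handling is reduced to early returns and failure cleanup is omitted; the real AlsaInitMixer keeps the handle and volume range for later AudioSetVolume calls.

#include <strings.h>
#include <alsa/asoundlib.h>

static snd_mixer_elem_t *FindMixerChannel(snd_mixer_t **mixer,
    const char *device, const char *channel)
{
    snd_mixer_elem_t *elem;

    if (snd_mixer_open(mixer, 0) < 0
        || snd_mixer_attach(*mixer, device) < 0
        || snd_mixer_selem_register(*mixer, NULL, NULL) < 0
        || snd_mixer_load(*mixer) < 0) {
        return NULL;                    // mixer setup failed
    }
    for (elem = snd_mixer_first_elem(*mixer); elem;
        elem = snd_mixer_elem_next(elem)) {
        if (!strcasecmp(snd_mixer_selem_get_name(elem), channel)) {
            return elem;                // found the requested control channel
        }
    }
    return NULL;                        // channel name not present
}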

codec.c

@@ -120,6 +120,7 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
fmts[1] = PIX_FMT_NONE;
Codec_get_format(video_ctx, fmts);
}
#ifdef USE_VDPAU
// VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4
if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
&& video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
@@ -151,6 +152,7 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
}
return 0;
}
#endif
// VA-API:
if (video_ctx->hwaccel_context) {
unsigned surface;
@@ -188,6 +190,7 @@ static int Codec_get_buffer(AVCodecContext * video_ctx, AVFrame * frame)
*/
static void Codec_release_buffer(AVCodecContext * video_ctx, AVFrame * frame)
{
#ifdef USE_VDPAU
// VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4
if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
&& video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
@@ -209,6 +212,7 @@ static void Codec_release_buffer(AVCodecContext * video_ctx, AVFrame * frame)
return;
}
#endif
// VA-API
if (video_ctx->hwaccel_context) {
VideoDecoder *decoder;
@@ -252,6 +256,7 @@ static void Codec_draw_horiz_band(AVCodecContext * video_ctx,
int type, __attribute__ ((unused))
int height)
{
#ifdef USE_VDPAU
// VDPAU: PIX_FMT_VDPAU_H264 .. PIX_FMT_VDPAU_VC1 PIX_FMT_VDPAU_MPEG4
if ((PIX_FMT_VDPAU_H264 <= video_ctx->pix_fmt
&& video_ctx->pix_fmt <= PIX_FMT_VDPAU_VC1)
@@ -269,8 +274,12 @@ static void Codec_draw_horiz_band(AVCodecContext * video_ctx,
//Debug(3, "codec: %d references\n", vrs->info.h264.num_ref_frames);
VideoDrawRenderState(decoder->HwDecoder, vrs);
return;
}
return;
#else
(void)video_ctx;
(void)frame;
#endif
}
//----------------------------------------------------------------------------
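
The USE_VDPAU guards added in these hunks are what let codec.c build when VDPAU support is disabled: the hardware branch compiles away and the parameters are explicitly marked as used so unused-parameter warnings stay quiet. A self-contained sketch of that pattern (function name and body are illustrative only):

#include <stdio.h>

static void DrawHorizBand(const void *video_ctx, const void *frame)
{
#ifdef USE_VDPAU
    // hardware decode path: hand the frame's render state to the backend
    printf("vdpau render state %p %p\n", video_ctx, frame);
#else
    // built without VDPAU: nothing to do, just silence the warnings
    (void)video_ctx;
    (void)frame;
#endif
}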
@@ -337,6 +346,8 @@ void CodecVideoOpen(VideoDecoder * decoder, const char *name, int codec_id)
if (!(decoder->VideoCtx = avcodec_alloc_context3(video_codec))) {
Fatal(_("codec: can't allocate video codec context\n"));
}
// FIXME: for software decoder use all cpus, otherwise 1
decoder->VideoCtx->thread_count = 1;
// open codec
#if LIBAVCODEC_VERSION_INT <= AV_VERSION_INT(53,5,0)
if (avcodec_open(decoder->VideoCtx, video_codec) < 0) {
@@ -350,12 +361,6 @@ void CodecVideoOpen(VideoDecoder * decoder, const char *name, int codec_id)
decoder->VideoCtx->opaque = decoder; // our structure
/*
// FIXME: the number of cpu's should be configurable
// Today this makes no big sense H264 is broken with current streams.
avcodec_thread_init(decoder->VideoCtx, 2); // support dual-cpu's
*/
Debug(3, "codec: video '%s'\n", decoder->VideoCtx->codec_name);
if (codec_id == CODEC_ID_H264) {
// 2.53 Ghz CPU is too slow for this codec at 1080i
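
The FIXME in this change hints at the intended follow-up: one decoding thread while a hardware decoder is active, all CPUs for software decoding. One way that could look on a POSIX system (a sketch under those assumptions, not the plugin's code):

#include <unistd.h>
#include <libavcodec/avcodec.h>

static void SetDecoderThreads(AVCodecContext *video_ctx, int hw_accelerated)
{
    if (hw_accelerated) {
        video_ctx->thread_count = 1;    // hw decoder: keep callbacks single-threaded
    } else {
        long cpus = sysconf(_SC_NPROCESSORS_ONLN);

        video_ctx->thread_count = cpus > 0 ? (int)cpus : 1;
    }
}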
@@ -551,8 +556,8 @@ struct _audio_decoder_
/// audio parser to support wired dvb streaks
AVCodecParserContext *AudioParser;
int SampleRate; ///< current sample rate
int Channels; ///< current channels
int SampleRate; ///< current stream sample rate
int Channels; ///< current stream channels
int HwSampleRate; ///< hw sample rate
int HwChannels; ///< hw channels
@@ -665,11 +670,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, AVPacket * avpkt)
AVCodecContext *audio_ctx;
int index;
if (!audio_decoder->AudioParser) {
Fatal(_("codec: internal error parser freeded while running\n"));
}
#define spkt avpkt
#if 0 // didn't fix crash in av_parser_parse2
//#define spkt avpkt
#if 1 // didn't fix crash in av_parser_parse2
AVPacket spkt[1];
// av_new_packet reserves FF_INPUT_BUFFER_PADDING_SIZE and clears it
@@ -681,6 +683,9 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, AVPacket * avpkt)
spkt->pts = avpkt->pts;
spkt->dts = avpkt->dts;
#endif
if (!audio_decoder->AudioParser) {
Fatal(_("codec: internal error parser freeded while running\n"));
}
audio_ctx = audio_decoder->AudioCtx;
index = 0;
@@ -778,7 +783,7 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, AVPacket * avpkt)
index += n;
}
#if 0
#if 1
av_destruct_packet(spkt);
#endif
}
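
The #if 1 block enabled above copies the incoming packet into a freshly allocated, padded one before it reaches av_parser_parse2() (the comment notes this was tried as a crash work-around), so the parser always sees FF_INPUT_BUFFER_PADDING_SIZE bytes of cleared padding behind the payload. A condensed sketch of that copy, using the FFmpeg packet API of that era (newer FFmpeg would unref with av_packet_unref() instead of av_destruct_packet()):

#include <string.h>
#include <libavcodec/avcodec.h>

static int CopyPacketPadded(AVPacket *dst, const AVPacket *src)
{
    // av_new_packet() reserves FF_INPUT_BUFFER_PADDING_SIZE and clears it
    if (av_new_packet(dst, src->size)) {
        return -1;                      // allocation failed
    }
    memcpy(dst->data, src->data, src->size);
    dst->pts = src->pts;
    dst->dts = src->dts;
    return 0;
}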


@@ -213,11 +213,12 @@ static int VideoMaxPacketSize; ///< biggest used packet buffer
static void VideoPacketInit(void)
{
int i;
AVPacket *avpkt;
Debug(4, "[softhddev]: %s\n", __FUNCTION__);
for (i = 0; i < VIDEO_PACKET_MAX; ++i) {
AVPacket *avpkt;
avpkt = &VideoPacketRb[i];
// build a clean ffmpeg av packet
if (av_new_packet(avpkt, VIDEO_BUFFER_SIZE)) {
@@ -229,8 +230,32 @@ static void VideoPacketInit(void)
atomic_set(&VideoPacketsFilled, 0);
}
/**
** Cleanup video packet ringbuffer.
*/
static void VideoPacketExit(void)
{
int i;
Debug(4, "[softhddev]: %s\n", __FUNCTION__);
atomic_set(&VideoPacketsFilled, 0);
for (i = 0; i < VIDEO_PACKET_MAX; ++i) {
AVPacket *avpkt;
avpkt = &VideoPacketRb[i];
// build a clean ffmpeg av packet
av_free_packet(avpkt);
}
}
/**
** Place video data in packet ringbuffer.
**
** @param pts presentation timestamp of pes packet
** @param data data of pes packet
** @param data size of pes packet
*/
static void VideoEnqueue(int64_t pts, const void *data, int size)
{
@@ -242,18 +267,16 @@ static void VideoEnqueue(int64_t pts, const void *data, int size)
if (!avpkt->stream_index) { // add pts only for first added
avpkt->pts = pts;
}
if (avpkt->stream_index + size + FF_INPUT_BUFFER_PADDING_SIZE >=
avpkt->size) {
if (avpkt->stream_index + size >= avpkt->size) {
Warning(_("video: packet buffer too small for %d\n"),
avpkt->stream_index + size + FF_INPUT_BUFFER_PADDING_SIZE);
avpkt->stream_index + size);
av_grow_packet(avpkt,
((size + FF_INPUT_BUFFER_PADDING_SIZE + VIDEO_BUFFER_SIZE / 2)
// new + grow reserves FF_INPUT_BUFFER_PADDING_SIZE
av_grow_packet(avpkt, ((size + VIDEO_BUFFER_SIZE / 2)
/ (VIDEO_BUFFER_SIZE / 2)) * (VIDEO_BUFFER_SIZE / 2));
#ifdef DEBUG
if (avpkt->size <
avpkt->stream_index + size + FF_INPUT_BUFFER_PADDING_SIZE) {
if (avpkt->size <= avpkt->stream_index + size) {
abort();
}
#endif
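
The rewritten grow step relies on av_grow_packet() reserving FF_INPUT_BUFFER_PADDING_SIZE itself, so only the payload growth needs computing: the incoming size is rounded up to the next multiple of half a buffer slot. In isolation (the VIDEO_BUFFER_SIZE value here is an assumption for illustration; the real constant is defined in this file):

#define VIDEO_BUFFER_SIZE (64 * 1024)   // assumed slot size, for illustration

static int PacketGrowBy(int size)
{
    // round up to the next multiple of half a slot, growing by at least
    // VIDEO_BUFFER_SIZE / 2 even when size is already an exact multiple
    return ((size + VIDEO_BUFFER_SIZE / 2) / (VIDEO_BUFFER_SIZE / 2))
        * (VIDEO_BUFFER_SIZE / 2);
}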
@@ -444,6 +467,7 @@ int PlayVideo(const uint8_t * data, int size)
Debug(3, "video: new stream %d\n", GetMsTicks() - VideoSwitch);
// FIXME: hack to test results
if (atomic_read(&VideoPacketsFilled) >= VIDEO_PACKET_MAX - 1) {
Debug(3, "video: new video stream lost\n");
NewVideoStream = 0;
return 0;
}
@@ -452,7 +476,7 @@ int PlayVideo(const uint8_t * data, int size)
NewVideoStream = 0;
}
// must be a PES start code
if (size < 9 || data[0] || data[1] || data[2] != 0x01) {
if (size < 9 || !data || data[0] || data[1] || data[2] != 0x01) {
Error(_("[softhddev] invalid PES video packet\n"));
return size;
}
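
The added !data test hardens the PES sanity check shown in this hunk: a video PES packet must be non-NULL, at least 9 bytes long, and start with the 0x00 0x00 0x01 start-code prefix. In isolation:

#include <stdint.h>

// returns 1 when data looks like the start of a PES video packet
static int ValidPesStart(const uint8_t *data, int size)
{
    if (size < 9 || !data || data[0] || data[1] || data[2] != 0x01) {
        return 0;                       // too short or no start-code prefix
    }
    return 1;
}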
@@ -864,9 +888,11 @@ void Stop(void)
MyAudioDecoder = NULL;
}
VideoOsdExit();
VideoExit();
AudioExit();
CodecExit();
VideoPacketExit();
if (StartX11Server) {
Debug(3, "x-setup: Stop x11 server\n");


@@ -39,7 +39,7 @@ extern "C" {
//////////////////////////////////////////////////////////////////////////////
static const char *const VERSION = "0.1.2";
static const char *const VERSION = "0.1.3";
static const char *const DESCRIPTION =
trNOOP("A software and GPU emulated HD device");

video.c

@@ -137,7 +137,7 @@ typedef enum _video_deinterlace_modes_
} VideoDeinterlaceModes;
///
/// Video scalinng modes.
/// Video scaleing modes.
///
typedef enum _video_scaling_modes_
{
@@ -147,6 +147,17 @@ typedef enum _video_scaling_modes_
VideoScalingAnamorphic, ///< anamorphic scaling
} VideoScalingModes;
///
/// Video zoom modes.
///
typedef enum _video_zoom_modes_
{
VideoNormal, ///< normal
VideoStretch, ///< stretch to all edges
VideoZoom, ///< zoom out
VideoAnamorphic, ///< anamorphic scaled (unsupported)
} VideoZoomModes;
//----------------------------------------------------------------------------
// Defines
//----------------------------------------------------------------------------
@@ -184,6 +195,9 @@ static VideoScalingModes VideoScaling;
/// Default audio/video delay
static int VideoAudioDelay;
/// Default zoom mode
static VideoZoomModes Video4to3ZoomMode;
//static char VideoSoftStartSync; ///< soft start sync audio/video
static char Video60HzMode; ///< handle 60hz displays
@@ -1347,13 +1361,22 @@ static void VaapiUpdateOutput(VaapiDecoder * decoder)
display_aspect_ratio.den);
// FIXME: store different positions for the ratios
if (display_aspect_ratio.num == 4 && display_aspect_ratio.den == 3) {
switch (Video4to3ZoomMode) {
case VideoNormal:
case VideoStretch:
case VideoZoom:
case VideoAnamorphic:
break;
}
}
decoder->OutputX = 0;
decoder->OutputY = 0;
decoder->OutputWidth = (VideoWindowHeight * display_aspect_ratio.num)
/ display_aspect_ratio.den;
decoder->OutputHeight = (VideoWindowWidth * display_aspect_ratio.num)
/ display_aspect_ratio.den;
decoder->OutputHeight = (VideoWindowWidth * display_aspect_ratio.den)
/ display_aspect_ratio.num;
if ((unsigned)decoder->OutputWidth > VideoWindowWidth) {
decoder->OutputWidth = VideoWindowWidth;
decoder->OutputY = (VideoWindowHeight - decoder->OutputHeight) / 2;
@@ -1361,6 +1384,8 @@ static void VaapiUpdateOutput(VaapiDecoder * decoder)
decoder->OutputHeight = VideoWindowHeight;
decoder->OutputX = (VideoWindowWidth - decoder->OutputWidth) / 2;
}
Debug(3, "video: aspect output %dx%d+%d+%d\n", decoder->OutputWidth,
decoder->OutputHeight, decoder->OutputX, decoder->OutputY);
}
/**
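
VaapiUpdateOutput here and VdpauUpdateOutput further down receive the same aspect fix: the output height has to be scaled by den/num, not num/den. A simplified, self-contained version of the corrected math, with the struct and parameter names invented for the sketch:

typedef struct {
    int x, y, width, height;            // output rectangle inside the window
} OutputRect;

static void UpdateOutput(OutputRect *out, unsigned window_width,
    unsigned window_height, int aspect_num, int aspect_den)
{
    out->x = 0;
    out->y = 0;
    // width when filling the window height, height when filling the width
    out->width = (window_height * aspect_num) / aspect_den;
    out->height = (window_width * aspect_den) / aspect_num;    // the fix
    if ((unsigned)out->width > window_width) {
        out->width = window_width;
        out->y = (window_height - out->height) / 2;     // letterbox bars
    } else if ((unsigned)out->height > window_height) {
        out->height = window_height;
        out->x = (window_width - out->width) / 2;       // pillarbox bars
    }
}

For a 1920x1080 window and a 16:9 stream this yields 1920x1080+0+0; for a 4:3 stream it yields 1440x1080 centered with 240-pixel pillarbox bars.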
@@ -3186,11 +3211,12 @@ static void VdpauCreateSurfaces(VdpauDecoder * decoder, int width, int height)
VdpauVideoSurfaceCreate(decoder->Device, decoder->ChromaType,
width, height, decoder->SurfacesFree + i);
if (status != VDP_STATUS_OK) {
Fatal(_("video/vdpau: can't create video surface: %s\n"),
Error(_("video/vdpau: can't create video surface: %s\n"),
VdpauGetErrorString(status));
// FIXME: no fatal
decoder->SurfacesFree[i] = VDP_INVALID_HANDLE;
// FIXME: better error handling
}
Debug(3, "video/vdpau: created video surface %dx%d with id 0x%08x\n",
Debug(4, "video/vdpau: created video surface %dx%d with id 0x%08x\n",
width, height, decoder->SurfacesFree[i]);
}
}
@@ -3213,11 +3239,14 @@ static void VdpauDestroySurfaces(VdpauDecoder * decoder)
Debug(3, "video/vdpau: invalid surface\n");
}
#endif
Debug(4, "video/vdpau: destroy video surface with id 0x%08x\n",
decoder->SurfacesFree[i]);
status = VdpauVideoSurfaceDestroy(decoder->SurfacesFree[i]);
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: can't destroy video surface: %s\n"),
VdpauGetErrorString(status));
}
decoder->SurfacesFree[i] = VDP_INVALID_HANDLE;
}
for (i = 0; i < decoder->SurfaceUsedN; ++i) {
#ifdef DEBUG
@@ -3225,11 +3254,14 @@ static void VdpauDestroySurfaces(VdpauDecoder * decoder)
Debug(3, "video/vdpau: invalid surface\n");
}
#endif
Debug(4, "video/vdpau: destroy video surface with id 0x%08x\n",
decoder->SurfacesUsed[i]);
status = VdpauVideoSurfaceDestroy(decoder->SurfacesUsed[i]);
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: can't destroy video surface: %s\n"),
VdpauGetErrorString(status));
}
decoder->SurfacesUsed[i] = VDP_INVALID_HANDLE;
}
decoder->SurfaceFreeN = 0;
decoder->SurfaceUsedN = 0;
@@ -3492,6 +3524,15 @@ static void VdpauCleanup(VdpauDecoder * decoder)
VdpStatus status;
int i;
if (decoder->VideoDecoder != VDP_INVALID_HANDLE) {
status = VdpauDecoderDestroy(decoder->VideoDecoder);
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: can't destroy video decoder: %s\n"),
VdpauGetErrorString(status));
}
decoder->VideoDecoder = VDP_INVALID_HANDLE;
}
if (decoder->VideoMixer != VDP_INVALID_HANDLE) {
status = VdpauVideoMixerDestroy(decoder->VideoMixer);
if (status != VDP_STATUS_OK) {
@@ -3909,6 +3950,8 @@ static void VideoVdpauInit(const char *display_name)
Fatal(_("video/vdpau: can't create output surface: %s\n"),
VdpauGetErrorString(status));
}
Debug(3, "video/vdpau: created output surface %dx%d with id 0x%08x\n",
VideoWindowWidth, VideoWindowHeight, VdpauSurfacesRb[i]);
}
}
@@ -3917,12 +3960,15 @@ static void VideoVdpauInit(const char *display_name)
///
static void VideoVdpauExit(void)
{
int i;
if (VdpauDecoders[0]) {
VdpauDelDecoder(VdpauDecoders[0]);
VdpauDecoders[0] = NULL;
}
if (VdpauDevice) {
if (VdpauQueue) {
VdpauPresentationQueueDestroy(VdpauQueue);
VdpauQueue = 0;
@@ -3931,7 +3977,24 @@ static void VideoVdpauExit(void)
VdpauPresentationQueueTargetDestroy(VdpauQueueTarget);
VdpauQueueTarget = 0;
}
//
// destroy display output surfaces
//
for (i = 0; i < OUTPUT_SURFACES_MAX; ++i) {
VdpStatus status;
Debug(4, "video/vdpau: destroy output surface with id 0x%08x\n",
VdpauSurfacesRb[i]);
status = VdpauOutputSurfaceDestroy(VdpauSurfacesRb[i]);
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: can't destroy output surface: %s\n"),
VdpauGetErrorString(status));
}
VdpauSurfacesRb[i] = VDP_INVALID_HANDLE;
}
// FIXME: more VDPAU cleanups...
if (VdpauDeviceDestroy) {
VdpauDeviceDestroy(VdpauDevice);
}
@@ -3950,6 +4013,8 @@ static void VdpauUpdateOutput(VdpauDecoder * decoder)
AVRational display_aspect_ratio;
input_aspect_ratio = decoder->InputAspect;
Debug(3, "video: input aspect %d:%d\n", input_aspect_ratio.num,
input_aspect_ratio.den);
if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
input_aspect_ratio.num = 1;
input_aspect_ratio.den = 1;
@@ -3965,20 +4030,31 @@ static void VdpauUpdateOutput(VdpauDecoder * decoder)
display_aspect_ratio.den);
// FIXME: store different positions for the ratios
if (display_aspect_ratio.num == 4 && display_aspect_ratio.den == 3) {
switch (Video4to3ZoomMode) {
case VideoNormal:
case VideoStretch:
case VideoZoom:
case VideoAnamorphic:
break;
}
}
decoder->OutputX = 0;
decoder->OutputY = 0;
decoder->OutputWidth = (VideoWindowHeight * display_aspect_ratio.num)
/ display_aspect_ratio.den;
decoder->OutputHeight = (VideoWindowWidth * display_aspect_ratio.num)
/ display_aspect_ratio.den;
decoder->OutputHeight = (VideoWindowWidth * display_aspect_ratio.den)
/ display_aspect_ratio.num;
if ((unsigned)decoder->OutputWidth > VideoWindowWidth) {
decoder->OutputWidth = VideoWindowWidth;
decoder->OutputY = (VideoWindowHeight - decoder->OutputHeight) / 2;
} else {
} else if ((unsigned)decoder->OutputHeight > VideoWindowHeight) {
decoder->OutputHeight = VideoWindowHeight;
decoder->OutputX = (VideoWindowWidth - decoder->OutputWidth) / 2;
}
Debug(3, "video: aspect output %dx%d+%d+%d\n", decoder->OutputWidth,
decoder->OutputHeight, decoder->OutputX, decoder->OutputY);
}
///
@@ -4063,6 +4139,10 @@ static enum PixelFormat Vdpau_get_format(VdpauDecoder * decoder,
max_refs = CODEC_SURFACES_DEFAULT;
// check profile
switch (video_ctx->codec_id) {
case CODEC_ID_MPEG1VIDEO:
max_refs = 2;
profile = VdpauCheckProfile(decoder, VDP_DECODER_PROFILE_MPEG1);
break;
case CODEC_ID_MPEG2VIDEO:
max_refs = 2;
profile =
@@ -5029,6 +5109,7 @@ static void VdpauOsdInit(int width, int height)
if (!VdpauDevice) {
Debug(3, "video/vdpau: vdpau not setup\n");
return;
}
VdpauOsdWidth = width;
@@ -5047,6 +5128,9 @@ static void VdpauOsdInit(int width, int height)
Error(_("video/vdpau: can't create bitmap surface: %s\n"),
VdpauGetErrorString(status));
}
Debug(4,
"video/vdpau: created bitmap surface %dx%d with id 0x%08x\n",
width, height, VdpauOsdBitmapSurface[i]);
}
}
#else
@@ -5059,6 +5143,9 @@ static void VdpauOsdInit(int width, int height)
Error(_("video/vdpau: can't create output surface: %s\n"),
VdpauGetErrorString(status));
}
Debug(4,
"video/vdpau: created osd output surface %dx%d with id 0x%08x\n",
width, height, VdpauOsdOutputSurface[i]);
}
}
#endif
@@ -5068,6 +5155,14 @@ static void VdpauOsdInit(int width, int height)
VdpauOsdClear();
}
/**
** Cleanup osd.
*/
static void VdpauOsdExit(void)
{
Debug(3, "FIXME: %s\n", __FUNCTION__);
}
#endif
//----------------------------------------------------------------------------
@@ -5222,6 +5317,25 @@ void VideoOsdInit(void)
#endif
}
/**
** Cleanup OSD.
*/
void VideoOsdExit(void)
{
#ifdef USE_VAAPI
if (VideoVaapiEnabled) {
// FIXME: VaapiOsdExit();
return;
}
#endif
#ifdef USE_VDPAU
if (VideoVdpauEnabled) {
VdpauOsdExit();
return;
}
#endif
}
#if 0
//----------------------------------------------------------------------------
@@ -5777,16 +5891,24 @@ void VideoDrawRenderState(VideoHwDecoder * decoder,
#ifdef USE_VDPAU
if (VideoVdpauEnabled) {
VdpStatus status;
uint32_t start;
uint32_t end;
Debug(4, "video/vdpau: decoder render to %#010x\n", vrs->surface);
start = GetMsTicks();
status =
VdpauDecoderRender(decoder->Vdpau.VideoDecoder, vrs->surface,
(VdpPictureInfo const *)&vrs->info, vrs->bitstream_buffers_used,
vrs->bitstream_buffers);
end = GetMsTicks();
if (status != VDP_STATUS_OK) {
Error(_("video/vdpau: decoder rendering failed: %s\n"),
VdpauGetErrorString(status));
}
if (end - start > 35) {
Debug(3, "video/vdpau: decoder render too slow %u ms\n",
end - start);
}
return;
}
#endif
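
The timing added here brackets VdpauDecoderRender with GetMsTicks() and logs any render that takes longer than 35 ms, most of a 40 ms frame period at 25 fps. A minimal millisecond tick source built on a POSIX monotonic clock, as one plausible implementation of such a helper (the plugin's actual GetMsTicks may differ):

#include <stdint.h>
#include <time.h>

static uint32_t GetMsTicks(void)
{
    struct timespec tspec;

    clock_gettime(CLOCK_MONOTONIC, &tspec);
    return (uint32_t)(tspec.tv_sec * 1000 + tspec.tv_nsec / 1000000);
}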