Prepared reentrant video stream.

Johns 2013-01-03 18:52:34 +01:00
parent 7cf6c1ab2b
commit d42475f2dc
7 changed files with 550 additions and 242 deletions

Makefile

@ -20,6 +20,7 @@ GIT_REV = $(shell git describe --always 2>/dev/null)
CONFIG := #-DDEBUG #-DOSD_DEBUG CONFIG := #-DDEBUG #-DOSD_DEBUG
CONFIG += -DAV_INFO -DAV_INFO_TIME=3000 # debug a/v sync CONFIG += -DAV_INFO -DAV_INFO_TIME=3000 # debug a/v sync
#CONFIG += -DUSE_PIP # too experimental PIP support
#CONFIG += -DHAVE_PTHREAD_NAME # supports new pthread_setname_np #CONFIG += -DHAVE_PTHREAD_NAME # supports new pthread_setname_np
#CONFIG += -DNO_TS_AUDIO # disable ts audio parser #CONFIG += -DNO_TS_AUDIO # disable ts audio parser
#CONFIG += -DUSE_TS_VIDEO # build new ts video parser #CONFIG += -DUSE_TS_VIDEO # build new ts video parser
@ -34,8 +35,8 @@ CC ?= gcc
CXX ?= g++ CXX ?= g++
CFLAGS ?= -g -O2 -W -Wall -Wextra -Winit-self \ CFLAGS ?= -g -O2 -W -Wall -Wextra -Winit-self \
-Wdeclaration-after-statement \ -Wdeclaration-after-statement \
-ftree-vectorize -msse3 -flax-vector-conversions -ftree-vectorize -msse3 -flax-vector-conversions #-fPIC
CXXFLAGS ?= -g -O2 -W -Wall -Wextra -Werror=overloaded-virtual CXXFLAGS ?= -g -O2 -W -Wall -Wextra -Werror=overloaded-virtual #-fPIC
### The directory environment: ### The directory environment:

audio.c

@ -1,7 +1,7 @@
/// ///
/// @file audio.c @brief Audio module /// @file audio.c @brief Audio module
/// ///
/// Copyright (c) 2009 - 2012 by Johns. All Rights Reserved. /// Copyright (c) 2009 - 2013 by Johns. All Rights Reserved.
/// ///
/// Contributor(s): /// Contributor(s):
/// ///
@ -163,7 +163,6 @@ static int AudioStereoDescent; ///< volume descent for stereo
static int AudioVolume; ///< current volume (0 .. 1000) static int AudioVolume; ///< current volume (0 .. 1000)
extern int VideoAudioDelay; ///< import audio/video delay extern int VideoAudioDelay; ///< import audio/video delay
extern int VideoGetBuffers(void); ///< Get number of input buffers.
/// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8) /// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8)
static const unsigned AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000; static const unsigned AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000;

softhddev.c

@ -84,6 +84,7 @@ static volatile char SkipAudio; ///< skip audio stream
static AudioDecoder *MyAudioDecoder; ///< audio decoder static AudioDecoder *MyAudioDecoder; ///< audio decoder
static enum CodecID AudioCodecID; ///< current codec id static enum CodecID AudioCodecID; ///< current codec id
static int AudioChannelID; ///< current audio channel id static int AudioChannelID; ///< current audio channel id
static VideoStream *AudioSyncStream; ///< video stream for audio/video sync
/// Minimum free space in audio buffer 8 packets for 8 channels /// Minimum free space in audio buffer 8 packets for 8 channels
#define AUDIO_MIN_BUFFER_FREE (3072 * 8 * 8) #define AUDIO_MIN_BUFFER_FREE (3072 * 8 * 8)
@ -987,7 +988,8 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
return 0; return 0;
} }
// soft limit buffer full // soft limit buffer full
if (AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE && VideoGetBuffers() > 3) { if (AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE && (!AudioSyncStream
|| VideoGetBuffers(AudioSyncStream) > 3)) {
return 0; return 0;
} }
// PES header 0x00 0x00 0x01 ID // PES header 0x00 0x00 0x01 ID
@ -1210,7 +1212,8 @@ int PlayTsAudio(const uint8_t * data, int size)
return 0; return 0;
} }
// soft limit buffer full // soft limit buffer full
if (AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE && VideoGetBuffers() > 3) { if (AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE && (!AudioSyncStream
|| VideoGetBuffers(AudioSyncStream) > 3)) {
return 0; return 0;
} }
@ -1235,89 +1238,104 @@ void SetVolumeDevice(int volume)
#include <alsa/iatomic.h> // portable atomic_t #include <alsa/iatomic.h> // portable atomic_t
#define VIDEO_BUFFER_SIZE (512 * 1024) ///< video PES buffer default size
#define VIDEO_PACKET_MAX 192 ///< max number of video packets
/**
** Video output stream device structure. Parser, decoder, display.
*/
struct __video_stream__
{
VideoHwDecoder *HwDecoder; ///< video hardware decoder
VideoDecoder *Decoder; ///< video decoder
enum CodecID CodecID; ///< current codec id
enum CodecID LastCodecID; ///< last codec id
volatile char NewStream; ///< flag new video stream
volatile char ClosingStream; ///< flag closing video stream
volatile char SkipStream; ///< skip video stream
volatile char Freezed; ///< stream freezed
volatile char TrickSpeed; ///< current trick speed
volatile char ClearBuffers; ///< clear video buffers
volatile char ClearClose; ///< clear video buffers for close
AVPacket PacketRb[VIDEO_PACKET_MAX]; ///< video PES packet ring buffer
int PacketWrite; ///< ring buffer write pointer
int PacketRead; ///< ring buffer read pointer
atomic_t PacketsFilled; ///< how many of the ring buffer is used
};
static VideoStream MyVideoStream[1]; ///< normal video stream
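The structure above gathers what used to be file-level globals (packet ring buffer, codec ids, decoder handles, state flags), so a second stream is in principle just another instance, prepared the way StartVideo() below prepares MyVideoStream. A minimal sketch, assuming a hypothetical PipVideoStream instance that is not part of this commit:

static VideoStream PipVideoStream[1];	///< hypothetical second video stream

/**
**	Sketch: prepare the hypothetical second stream, mirroring StartVideo().
**	AudioSyncStream is deliberately left alone, audio stays synced to the
**	main stream.
*/
static void PipStreamStart(void)
{
    if (!PipVideoStream->Decoder) {
	if ((PipVideoStream->HwDecoder = VideoNewHwDecoder(PipVideoStream))) {
	    PipVideoStream->Decoder =
		CodecVideoNewDecoder(PipVideoStream->HwDecoder);
	    PipVideoStream->SkipStream = 0;
	}
	PipVideoStream->CodecID = CODEC_ID_NONE;
	PipVideoStream->LastCodecID = CODEC_ID_NONE;
    }
    VideoPacketInit(PipVideoStream);
}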
#ifdef DEBUG #ifdef DEBUG
uint32_t VideoSwitch; ///< debug video switch ticks uint32_t VideoSwitch; ///< debug video switch ticks
static int VideoMaxPacketSize; ///< biggest used packet buffer
#endif #endif
//#define STILL_DEBUG 2 //#define STILL_DEBUG 2
#ifdef STILL_DEBUG #ifdef STILL_DEBUG
static char InStillPicture; ///< flag still picture static char InStillPicture; ///< flag still picture
#endif #endif
static volatile char NewVideoStream; ///< flag new video stream
static volatile char ClosingVideoStream; ///< flag closing video stream
static VideoHwDecoder *MyHwDecoder; ///< video hw decoder
static VideoDecoder *MyVideoDecoder; ///< video decoder
static enum CodecID VideoCodecID; ///< current codec id
const char *X11DisplayName; ///< x11 display name const char *X11DisplayName; ///< x11 display name
static volatile char Usr1Signal; ///< true got usr1 signal static volatile char Usr1Signal; ///< true got usr1 signal
#define VIDEO_BUFFER_SIZE (512 * 1024) ///< video PES buffer default size
#define VIDEO_PACKET_MAX 192 ///< max number of video packets
/// video PES packet ring buffer
static AVPacket VideoPacketRb[VIDEO_PACKET_MAX];
static int VideoPacketWrite; ///< write pointer
static int VideoPacketRead; ///< read pointer
static atomic_t VideoPacketsFilled; ///< how many of the buffer is used
static volatile char VideoClearBuffers; ///< clear video buffers
static volatile char VideoClearClose; ///< clear video buffers upto close
static volatile char SkipVideo; ///< skip video
static volatile char CurrentTrickSpeed; ///< current trick speed
#ifdef DEBUG
static int VideoMaxPacketSize; ///< biggest used packet buffer
#endif
/** /**
** Initialize video packet ringbuffer. ** Initialize video packet ringbuffer.
**
** @param stream video stream
*/ */
static void VideoPacketInit(void) static void VideoPacketInit(VideoStream * stream)
{ {
int i; int i;
for (i = 0; i < VIDEO_PACKET_MAX; ++i) { for (i = 0; i < VIDEO_PACKET_MAX; ++i) {
AVPacket *avpkt; AVPacket *avpkt;
avpkt = &VideoPacketRb[i]; avpkt = &stream->PacketRb[i];
// build a clean ffmpeg av packet // build a clean ffmpeg av packet
if (av_new_packet(avpkt, VIDEO_BUFFER_SIZE)) { if (av_new_packet(avpkt, VIDEO_BUFFER_SIZE)) {
Fatal(_("[softhddev] out of memory\n")); Fatal(_("[softhddev] out of memory\n"));
} }
avpkt->priv = NULL;
} }
atomic_set(&VideoPacketsFilled, 0); atomic_set(&stream->PacketsFilled, 0);
VideoPacketRead = VideoPacketWrite = 0; stream->PacketRead = stream->PacketWrite = 0;
} }
/** /**
** Cleanup video packet ringbuffer. ** Cleanup video packet ringbuffer.
**
** @param stream video stream
*/ */
static void VideoPacketExit(void) static void VideoPacketExit(VideoStream * stream)
{ {
int i; int i;
atomic_set(&VideoPacketsFilled, 0); atomic_set(&stream->PacketsFilled, 0);
for (i = 0; i < VIDEO_PACKET_MAX; ++i) { for (i = 0; i < VIDEO_PACKET_MAX; ++i) {
av_free_packet(&VideoPacketRb[i]); av_free_packet(&stream->PacketRb[i]);
} }
} }
/** /**
** Place video data in packet ringbuffer. ** Place video data in packet ringbuffer.
** **
** @param stream video stream
** @param pts presentation timestamp of pes packet ** @param pts presentation timestamp of pes packet
** @param data data of pes packet ** @param data data of pes packet
** @param data size of pes packet ** @param data size of pes packet
*/ */
static void VideoEnqueue(int64_t pts, const void *data, int size) static void VideoEnqueue(VideoStream * stream, int64_t pts, const void *data,
int size)
{ {
AVPacket *avpkt; AVPacket *avpkt;
// Debug(3, "video: enqueue %d\n", size); // Debug(3, "video: enqueue %d\n", size);
avpkt = &VideoPacketRb[VideoPacketWrite]; avpkt = &stream->PacketRb[stream->PacketWrite];
if (!avpkt->stream_index) { // add pts only for first added if (!avpkt->stream_index) { // add pts only for first added
avpkt->pts = pts; avpkt->pts = pts;
} }
@ -1352,13 +1370,16 @@ static void VideoEnqueue(int64_t pts, const void *data, int size)
/** /**
** Reset current packet. ** Reset current packet.
**
** @param stream video stream
*/ */
static void VideoResetPacket(void) static void VideoResetPacket(VideoStream * stream)
{ {
AVPacket *avpkt; AVPacket *avpkt;
avpkt = &VideoPacketRb[VideoPacketWrite]; avpkt = &stream->PacketRb[stream->PacketWrite];
avpkt->stream_index = 0; avpkt->stream_index = 0;
avpkt->priv = NULL;
avpkt->pts = AV_NOPTS_VALUE; avpkt->pts = AV_NOPTS_VALUE;
avpkt->dts = AV_NOPTS_VALUE; avpkt->dts = AV_NOPTS_VALUE;
} }
@ -1366,13 +1387,14 @@ static void VideoResetPacket(void)
/** /**
** Finish current packet advance to next. ** Finish current packet advance to next.
** **
** @param stream video stream
** @param codec_id codec id of packet (MPEG/H264) ** @param codec_id codec id of packet (MPEG/H264)
*/ */
static void VideoNextPacket(int codec_id) static void VideoNextPacket(VideoStream * stream, int codec_id)
{ {
AVPacket *avpkt; AVPacket *avpkt;
avpkt = &VideoPacketRb[VideoPacketWrite]; avpkt = &stream->PacketRb[stream->PacketWrite];
if (!avpkt->stream_index) { // ignore empty packets if (!avpkt->stream_index) { // ignore empty packets
if (codec_id != CODEC_ID_NONE) { if (codec_id != CODEC_ID_NONE) {
return; return;
@ -1380,7 +1402,7 @@ static void VideoNextPacket(int codec_id)
Debug(3, "video: possible stream change loss\n"); Debug(3, "video: possible stream change loss\n");
} }
if (atomic_read(&VideoPacketsFilled) >= VIDEO_PACKET_MAX - 1) { if (atomic_read(&stream->PacketsFilled) >= VIDEO_PACKET_MAX - 1) {
// no free slot available drop last packet // no free slot available drop last packet
Error(_("video: no empty slot in packet ringbuffer\n")); Error(_("video: no empty slot in packet ringbuffer\n"));
avpkt->stream_index = 0; avpkt->stream_index = 0;
@ -1396,13 +1418,13 @@ static void VideoNextPacket(int codec_id)
//H264Dump(avpkt->data, avpkt->stream_index); //H264Dump(avpkt->data, avpkt->stream_index);
// advance packet write // advance packet write
VideoPacketWrite = (VideoPacketWrite + 1) % VIDEO_PACKET_MAX; stream->PacketWrite = (stream->PacketWrite + 1) % VIDEO_PACKET_MAX;
atomic_inc(&VideoPacketsFilled); atomic_inc(&stream->PacketsFilled);
VideoDisplayWakeup(); VideoDisplayWakeup();
// intialize next package to use // intialize next package to use
VideoResetPacket(); VideoResetPacket(stream);
} }
/** /**
@ -1414,8 +1436,13 @@ static void VideoNextPacket(int codec_id)
** **
** FIXME: there are stations which have multiple pictures and ** FIXME: there are stations which have multiple pictures and
** the last picture incomplete in the PES packet. ** the last picture incomplete in the PES packet.
**
** FIXME: move function call into PlayVideo, than the hardware
** decoder didn't need to support multiple frames decoding.
**
** @param avpkt ffmpeg a/v packet
*/ */
void FixPacketForFFMpeg(VideoDecoder * MyVideoDecoder, AVPacket * avpkt) static void FixPacketForFFMpeg(VideoDecoder * vdecoder, AVPacket * avpkt)
{ {
uint8_t *p; uint8_t *p;
int n; int n;
@ -1455,7 +1482,7 @@ void FixPacketForFFMpeg(VideoDecoder * MyVideoDecoder, AVPacket * avpkt)
tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]); tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
} }
#endif #endif
CodecVideoDecode(MyVideoDecoder, tmp); CodecVideoDecode(vdecoder, tmp);
// time-stamp only valid for first packet // time-stamp only valid for first packet
tmp->pts = AV_NOPTS_VALUE; tmp->pts = AV_NOPTS_VALUE;
tmp->dts = AV_NOPTS_VALUE; tmp->dts = AV_NOPTS_VALUE;
@ -1472,27 +1499,32 @@ void FixPacketForFFMpeg(VideoDecoder * MyVideoDecoder, AVPacket * avpkt)
tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]); tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
} }
#endif #endif
CodecVideoDecode(MyVideoDecoder, tmp); CodecVideoDecode(vdecoder, tmp);
} }
/** /**
** Poll PES packet ringbuffer. ** Poll PES packet ringbuffer.
** **
** Called if video frame buffers are full. ** Called if video frame buffers are full.
**
** @param stream video stream
**
** @retval 1 something todo
** @retval -1 empty stream
*/ */
int VideoPollInput(void) int VideoPollInput(VideoStream * stream)
{ {
if (VideoClearBuffers) { if (stream->ClearBuffers) {
atomic_set(&VideoPacketsFilled, 0); atomic_set(&stream->PacketsFilled, 0);
VideoPacketRead = VideoPacketWrite; stream->PacketRead = stream->PacketWrite;
if (MyVideoDecoder) { if (stream->Decoder) {
CodecVideoFlushBuffers(MyVideoDecoder); CodecVideoFlushBuffers(stream->Decoder);
VideoResetStart(MyHwDecoder); VideoResetStart(stream->HwDecoder);
} }
VideoClearBuffers = 0; stream->ClearBuffers = 0;
return 1; return 1;
} }
if (!atomic_read(&VideoPacketsFilled)) { if (!atomic_read(&stream->PacketsFilled)) {
return -1; return -1;
} }
return 1; return 1;
@ -1501,83 +1533,85 @@ int VideoPollInput(void)
/** /**
** Decode from PES packet ringbuffer. ** Decode from PES packet ringbuffer.
** **
** @param stream video stream
**
** @retval 0 packet decoded ** @retval 0 packet decoded
** @retval 1 stream paused ** @retval 1 stream paused
** @retval -1 empty stream ** @retval -1 empty stream
*/ */
int VideoDecodeInput(void) int VideoDecodeInput(VideoStream * stream)
{ {
int filled; int filled;
AVPacket *avpkt; AVPacket *avpkt;
int saved_size; int saved_size;
static int last_codec_id = CODEC_ID_NONE;
if (VideoClearBuffers) { if (stream->ClearBuffers) { // clear buffer request
atomic_set(&VideoPacketsFilled, 0); atomic_set(&stream->PacketsFilled, 0);
VideoPacketRead = VideoPacketWrite; stream->PacketRead = stream->PacketWrite;
if (MyVideoDecoder) { if (stream->Decoder) {
CodecVideoFlushBuffers(MyVideoDecoder); CodecVideoFlushBuffers(stream->Decoder);
VideoResetStart(MyHwDecoder); VideoResetStart(stream->HwDecoder);
} }
VideoClearBuffers = 0; stream->ClearBuffers = 0;
return 1; return 1;
} }
if (StreamFreezed) { // stream freezed if (stream->Freezed) { // stream freezed
// clear is called during freezed // clear is called during freezed
return 1; return 1;
} }
filled = atomic_read(&VideoPacketsFilled); filled = atomic_read(&stream->PacketsFilled);
if (!filled) { if (!filled) {
return -1; return -1;
} }
// clearing for normal channel switch has no advantage // clearing for normal channel switch has no advantage
if (VideoClearClose /*|| ClosingVideoStream */ ) { if (stream->ClearClose /*|| stream->ClosingStream */ ) {
int f; int f;
// flush buffers, if close is in the queue // flush buffers, if close is in the queue
for (f = 0; f < filled; ++f) { for (f = 0; f < filled; ++f) {
avpkt = &VideoPacketRb[(VideoPacketRead + f) % VIDEO_PACKET_MAX]; avpkt =
&stream->PacketRb[(stream->PacketRead + f) % VIDEO_PACKET_MAX];
if ((int)(size_t) avpkt->priv == CODEC_ID_NONE) { if ((int)(size_t) avpkt->priv == CODEC_ID_NONE) {
if (f) { if (f) {
Debug(3, "video: cleared upto close\n"); Debug(3, "video: cleared upto close\n");
atomic_sub(f, &VideoPacketsFilled); atomic_sub(f, &stream->PacketsFilled);
VideoPacketRead = (VideoPacketRead + f) % VIDEO_PACKET_MAX; stream->PacketRead =
VideoClearClose = 0; (stream->PacketRead + f) % VIDEO_PACKET_MAX;
stream->ClearClose = 0;
} }
break; break;
} }
} }
ClosingVideoStream = 0; stream->ClosingStream = 0;
} }
avpkt = &VideoPacketRb[VideoPacketRead];
// //
// handle queued commands // handle queued commands
// //
avpkt = &stream->PacketRb[stream->PacketRead];
switch ((int)(size_t) avpkt->priv) { switch ((int)(size_t) avpkt->priv) {
case CODEC_ID_NONE: case CODEC_ID_NONE:
ClosingVideoStream = 0; stream->ClosingStream = 0;
if (last_codec_id != CODEC_ID_NONE) { if (stream->LastCodecID != CODEC_ID_NONE) {
last_codec_id = CODEC_ID_NONE; stream->LastCodecID = CODEC_ID_NONE;
CodecVideoClose(MyVideoDecoder); CodecVideoClose(stream->Decoder);
goto skip; goto skip;
} }
// FIXME: look if more close are in the queue // FIXME: look if more close are in the queue
// size can be zero // size can be zero
goto skip; goto skip;
case CODEC_ID_MPEG2VIDEO: case CODEC_ID_MPEG2VIDEO:
if (last_codec_id != CODEC_ID_MPEG2VIDEO) { if (stream->LastCodecID != CODEC_ID_MPEG2VIDEO) {
last_codec_id = CODEC_ID_MPEG2VIDEO; stream->LastCodecID = CODEC_ID_MPEG2VIDEO;
CodecVideoOpen(MyVideoDecoder, VideoHardwareDecoder < 0 CodecVideoOpen(stream->Decoder, VideoHardwareDecoder < 0
&& VdpauDecoder ? "mpegvideo_vdpau" : NULL, && VdpauDecoder ? "mpegvideo_vdpau" : NULL,
CODEC_ID_MPEG2VIDEO); CODEC_ID_MPEG2VIDEO);
} }
break; break;
case CODEC_ID_H264: case CODEC_ID_H264:
if (last_codec_id != CODEC_ID_H264) { if (stream->LastCodecID != CODEC_ID_H264) {
last_codec_id = CODEC_ID_H264; stream->LastCodecID = CODEC_ID_H264;
CodecVideoOpen(MyVideoDecoder, VideoHardwareDecoder CodecVideoOpen(stream->Decoder, VideoHardwareDecoder
&& VdpauDecoder ? "h264_vdpau" : NULL, CODEC_ID_H264); && VdpauDecoder ? "h264_vdpau" : NULL, CODEC_ID_H264);
} }
break; break;
@ -1590,28 +1624,30 @@ int VideoDecodeInput(void)
avpkt->size = avpkt->stream_index; avpkt->size = avpkt->stream_index;
avpkt->stream_index = 0; avpkt->stream_index = 0;
if (last_codec_id == CODEC_ID_MPEG2VIDEO) { if (stream->LastCodecID == CODEC_ID_MPEG2VIDEO) {
FixPacketForFFMpeg(MyVideoDecoder, avpkt); FixPacketForFFMpeg(stream->Decoder, avpkt);
} else { } else {
CodecVideoDecode(MyVideoDecoder, avpkt); CodecVideoDecode(stream->Decoder, avpkt);
} }
avpkt->size = saved_size; avpkt->size = saved_size;
skip: skip:
// advance packet read // advance packet read
VideoPacketRead = (VideoPacketRead + 1) % VIDEO_PACKET_MAX; stream->PacketRead = (stream->PacketRead + 1) % VIDEO_PACKET_MAX;
atomic_dec(&VideoPacketsFilled); atomic_dec(&stream->PacketsFilled);
return 0; return 0;
} }
/** /**
** Get number of video buffers. ** Get number of video buffers.
**
** @param stream video stream
*/ */
int VideoGetBuffers(void) int VideoGetBuffers(const VideoStream * stream)
{ {
return atomic_read(&VideoPacketsFilled); return atomic_read(&stream->PacketsFilled);
} }
/** /**
@ -1631,13 +1667,17 @@ static void StartVideo(void)
VideoSetFullscreen(1); VideoSetFullscreen(1);
} }
VideoOsdInit(); VideoOsdInit();
if (!MyVideoDecoder) { if (!MyVideoStream->Decoder) {
if ((MyHwDecoder = VideoNewHwDecoder())) { if ((MyVideoStream->HwDecoder = VideoNewHwDecoder(MyVideoStream))) {
MyVideoDecoder = CodecVideoNewDecoder(MyHwDecoder); MyVideoStream->Decoder =
CodecVideoNewDecoder(MyVideoStream->HwDecoder);
MyVideoStream->SkipStream = 0;
AudioSyncStream = MyVideoStream;
} }
VideoCodecID = CODEC_ID_NONE; MyVideoStream->CodecID = CODEC_ID_NONE;
MyVideoStream->LastCodecID = CODEC_ID_NONE;
} }
VideoPacketInit(); VideoPacketInit(MyVideoStream);
} }
/** /**
@ -1647,19 +1687,21 @@ static void StopVideo(void)
{ {
VideoOsdExit(); VideoOsdExit();
VideoExit(); VideoExit();
if (MyVideoDecoder) { if (MyVideoStream->Decoder) {
MyVideoStream->SkipStream = 1;
// FIXME: this can crash, hw decoder released by video exit // FIXME: this can crash, hw decoder released by video exit
CodecVideoClose(MyVideoDecoder); CodecVideoClose(MyVideoStream->Decoder);
CodecVideoDelDecoder(MyVideoDecoder); CodecVideoDelDecoder(MyVideoStream->Decoder);
MyVideoDecoder = NULL; MyVideoStream->Decoder = NULL;
AudioSyncStream = NULL;
} }
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
// done by exit: VideoDelHwDecoder(MyHwDecoder); // done by exit: VideoDelHwDecoder(MyVideoStream->HwDecoder);
MyHwDecoder = NULL; MyVideoStream->HwDecoder = NULL;
} }
VideoPacketExit(); VideoPacketExit(MyVideoStream);
NewVideoStream = 1; MyVideoStream->NewStream = 1;
} }
#ifdef DEBUG #ifdef DEBUG
@ -1727,20 +1769,14 @@ static int ValidateMpeg(const uint8_t * data, int size)
/** /**
** Play video packet. ** Play video packet.
** **
** @param stream video stream
** @param data data of exactly one complete PES packet ** @param data data of exactly one complete PES packet
** @param size size of PES packet ** @param size size of PES packet
** **
** @return number of bytes used, 0 if internal buffer are full. ** @return number of bytes used, 0 if internal buffer are full.
** **
** @note vdr sends incomplete packets, va-api h264 decoder only
** supports complete packets.
** We buffer here until we receive an complete PES Packet, which
** is no problem, the audio is always far behind us.
** cTsToPes::GetPes splits the packets.
**
** @todo FIXME: combine the 5 ifs at start of the function
*/ */
int PlayVideo(const uint8_t * data, int size) int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
{ {
const uint8_t *check; const uint8_t *check;
int64_t pts; int64_t pts;
@ -1748,27 +1784,25 @@ int PlayVideo(const uint8_t * data, int size)
int z; int z;
int l; int l;
if (!MyVideoDecoder) { // no x11 video started if (!stream->Decoder) { // no x11 video started
return size; return size;
} }
if (SkipVideo) { // skip video if (stream->SkipStream) { // skip video stream
return size; return size;
} }
if (StreamFreezed) { // stream freezed if (stream->Freezed) { // stream freezed
return 0; return 0;
} }
if (NewVideoStream) { // channel switched if (stream->NewStream) { // channel switched
Debug(3, "video: new stream %dms\n", GetMsTicks() - VideoSwitch); Debug(3, "video: new stream %dms\n", GetMsTicks() - VideoSwitch);
// FIXME: hack to test results if (atomic_read(&stream->PacketsFilled) >= VIDEO_PACKET_MAX - 1) {
if (atomic_read(&VideoPacketsFilled) >= VIDEO_PACKET_MAX - 1) {
Debug(3, "video: new video stream lost\n"); Debug(3, "video: new video stream lost\n");
NewVideoStream = 0;
return 0; return 0;
} }
VideoNextPacket(CODEC_ID_NONE); VideoNextPacket(stream, CODEC_ID_NONE);
VideoCodecID = CODEC_ID_NONE; stream->CodecID = CODEC_ID_NONE;
ClosingVideoStream = 1; stream->ClosingStream = 1;
NewVideoStream = 0; stream->NewStream = 0;
} }
// must be a PES start code // must be a PES start code
if (size < 9 || !data || data[0] || data[1] || data[2] != 0x01) { if (size < 9 || !data || data[0] || data[1] || data[2] != 0x01) {
@ -1791,12 +1825,13 @@ int PlayVideo(const uint8_t * data, int size)
return size; return size;
} }
// hard limit buffer full: needed for replay // hard limit buffer full: needed for replay
if (atomic_read(&VideoPacketsFilled) >= VIDEO_PACKET_MAX - 3) { if (atomic_read(&stream->PacketsFilled) >= VIDEO_PACKET_MAX - 3) {
return 0; return 0;
} }
// soft limit buffer full // soft limit buffer full
if (atomic_read(&VideoPacketsFilled) > 3 if (atomic_read(&stream->PacketsFilled) > 3
&& AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE) { && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE) {
// FIXME: audio only for main video stream
return 0; return 0;
} }
// get pts/dts // get pts/dts
@ -1825,11 +1860,11 @@ int PlayVideo(const uint8_t * data, int size)
if ((data[6] & 0xC0) == 0x80 && z >= 2 && check[0] == 0x01 if ((data[6] & 0xC0) == 0x80 && z >= 2 && check[0] == 0x01
&& check[1] == 0x09) { && check[1] == 0x09) {
// old PES HDTV recording z == 2 // old PES HDTV recording z == 2
if (VideoCodecID == CODEC_ID_H264) { if (stream->CodecID == CODEC_ID_H264) {
#if 0 #if 0
// this should improve ffwd+frew, but produce crash in ffmpeg // this should improve ffwd+frew, but produce crash in ffmpeg
// with some streams // with some streams
if (CurrentTrickSpeed && pts != (int64_t) AV_NOPTS_VALUE) { if (stream->TrickSpeed && pts != (int64_t) AV_NOPTS_VALUE) {
// H264 NAL End of Sequence // H264 NAL End of Sequence
static uint8_t seq_end_h264[] = static uint8_t seq_end_h264[] =
{ 0x00, 0x00, 0x00, 0x01, 0x0A }; { 0x00, 0x00, 0x00, 0x01, 0x0A };
@ -1843,22 +1878,22 @@ int PlayVideo(const uint8_t * data, int size)
} }
} }
#endif #endif
VideoNextPacket(CODEC_ID_H264); VideoNextPacket(stream, CODEC_ID_H264);
} else { } else {
Debug(3, "video: h264 detected\n"); Debug(3, "video: h264 detected\n");
VideoCodecID = CODEC_ID_H264; stream->CodecID = CODEC_ID_H264;
} }
// SKIP PES header (ffmpeg supports short start code) // SKIP PES header (ffmpeg supports short start code)
VideoEnqueue(pts, check - 2, l + 2); VideoEnqueue(stream, pts, check - 2, l + 2);
return size; return size;
} }
// PES start code 0x00 0x00 0x01 // PES start code 0x00 0x00 0x01
if (z > 1 && check[0] == 0x01) { if (z > 1 && check[0] == 0x01) {
if (VideoCodecID == CODEC_ID_MPEG2VIDEO) { if (stream->CodecID == CODEC_ID_MPEG2VIDEO) {
VideoNextPacket(CODEC_ID_MPEG2VIDEO); VideoNextPacket(stream, CODEC_ID_MPEG2VIDEO);
} else { } else {
Debug(3, "video: mpeg2 detected ID %02x\n", check[3]); Debug(3, "video: mpeg2 detected ID %02x\n", check[3]);
VideoCodecID = CODEC_ID_MPEG2VIDEO; stream->CodecID = CODEC_ID_MPEG2VIDEO;
} }
#ifdef DEBUG #ifdef DEBUG
if (ValidateMpeg(data, size)) { if (ValidateMpeg(data, size)) {
@ -1866,29 +1901,70 @@ int PlayVideo(const uint8_t * data, int size)
} }
#endif #endif
// SKIP PES header, begin of start code // SKIP PES header, begin of start code
VideoEnqueue(pts, check - z, l + z); VideoEnqueue(stream, pts, check - z, l + z);
return size; return size;
} }
// this happens when vdr sends incomplete packets // this happens when vdr sends incomplete packets
if (VideoCodecID == CODEC_ID_NONE) { if (stream->CodecID == CODEC_ID_NONE) {
Debug(3, "video: not detected\n"); Debug(3, "video: not detected\n");
return size; return size;
} }
// SKIP PES header // SKIP PES header
VideoEnqueue(pts, data + 9 + n, size - 9 - n); VideoEnqueue(stream, pts, data + 9 + n, size - 9 - n);
// incomplete packets produce artefacts after channel switch // incomplete packets produce artefacts after channel switch
// packet < 65526 is the last split packet, detect it here for // packet < 65526 is the last split packet, detect it here for
// better latency // better latency
if (size < 65526 && VideoCodecID == CODEC_ID_MPEG2VIDEO) { if (size < 65526 && stream->CodecID == CODEC_ID_MPEG2VIDEO) {
// mpeg codec supports incomplete packets // mpeg codec supports incomplete packets
// waiting for a full complete packages, increases needed delays // waiting for a full complete packages, increases needed delays
VideoNextPacket(CODEC_ID_MPEG2VIDEO); VideoNextPacket(stream, CODEC_ID_MPEG2VIDEO);
} }
return size; return size;
} }
/**
** Play video packet.
**
** @param data data of exactly one complete PES packet
** @param size size of PES packet
**
** @return number of bytes used, 0 if internal buffer are full.
**
*/
int PlayVideo2(const uint8_t * data, int size)
{
static VideoStream *stream;
if (!stream) { // test hack v1
stream = MyVideoStream;
}
return PlayVideo3(stream, data, size);
}
/**
** Play video packet.
**
** @param data data of exactly one complete PES packet
** @param size size of PES packet
**
** @return number of bytes used, 0 if internal buffer are full.
**
** @note vdr sends incomplete packets, va-api h264 decoder only
** supports complete packets.
** We buffer here until we receive an complete PES Packet, which
** is no problem, the audio is always far behind us.
** cTsToPes::GetPes splits the packets.
**
** @todo FIXME: combine the 5 ifs at start of the function
*/
int PlayVideo(const uint8_t * data, int size)
{
return PlayVideo3(MyVideoStream, data, size);
}
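PlayVideo() and PlayVideo2() are thin wrappers; all parsing and buffering happens in PlayVideo3() against the stream it is handed. Feeding the hypothetical PipVideoStream sketched earlier is therefore just another call site; the PipPlayVideo() helper below is illustrative only and not part of this commit:

/**
**	Sketch: feed one complete PES packet into the hypothetical second
**	stream.  PlayVideo3() returns 0 while that stream's packet ring
**	buffer is full, the same busy-wait used for still pictures.
*/
static void PipPlayVideo(const uint8_t * data, int size)
{
    while (!PlayVideo3(PipVideoStream, data, size)) {
	usleep(1 * 1000);		// ring buffer full, retry
    }
}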
/// call VDR support function /// call VDR support function
extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int); extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);
@ -1986,12 +2062,12 @@ uint8_t *GrabImage(int *size, int jpeg, int quality, int width, int height)
int SetPlayMode(int play_mode) int SetPlayMode(int play_mode)
{ {
VideoDisplayWakeup(); VideoDisplayWakeup();
if (MyVideoDecoder) { // tell video parser we have new stream if (MyVideoStream->Decoder) { // tell video parser we have new stream
if (VideoCodecID != CODEC_ID_NONE) { if (MyVideoStream->CodecID != CODEC_ID_NONE) {
NewVideoStream = 1; MyVideoStream->NewStream = 1;
// tell hw decoder we are closing stream // tell hw decoder we are closing stream
VideoSetClosing(MyHwDecoder); VideoSetClosing(MyVideoStream->HwDecoder);
VideoResetStart(MyHwDecoder); VideoResetStart(MyVideoStream->HwDecoder);
#ifdef DEBUG #ifdef DEBUG
VideoSwitch = GetMsTicks(); VideoSwitch = GetMsTicks();
#endif #endif
@ -2026,8 +2102,8 @@ int SetPlayMode(int play_mode)
*/ */
int64_t GetSTC(void) int64_t GetSTC(void)
{ {
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
return VideoGetClock(MyHwDecoder); return VideoGetClock(MyVideoStream->HwDecoder);
} }
// could happen during dettached // could happen during dettached
Warning(_("softhddev: %s called without hw decoder\n"), __FUNCTION__); Warning(_("softhddev: %s called without hw decoder\n"), __FUNCTION__);
@ -2050,8 +2126,8 @@ void GetVideoSize(int *width, int *height, double *aspect)
int aspect_num; int aspect_num;
int aspect_den; int aspect_den;
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
VideoGetVideoSize(MyHwDecoder, width, height, &aspect_num, VideoGetVideoSize(MyVideoStream->HwDecoder, width, height, &aspect_num,
&aspect_den); &aspect_den);
*aspect = (double)aspect_num / (double)aspect_den; *aspect = (double)aspect_num / (double)aspect_den;
} else { } else {
@ -2080,14 +2156,15 @@ void GetVideoSize(int *width, int *height, double *aspect)
*/ */
void TrickSpeed(int speed) void TrickSpeed(int speed)
{ {
CurrentTrickSpeed = speed; MyVideoStream->TrickSpeed = speed;
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
VideoSetTrickSpeed(MyHwDecoder, speed); VideoSetTrickSpeed(MyVideoStream->HwDecoder, speed);
} else { } else {
// can happen, during startup // can happen, during startup
Debug(3, "softhddev: %s called without hw decoder\n", __FUNCTION__); Debug(3, "softhddev: %s called without hw decoder\n", __FUNCTION__);
} }
StreamFreezed = 0; StreamFreezed = 0;
MyVideoStream->Freezed = 0;
} }
/** /**
@ -2097,19 +2174,19 @@ void Clear(void)
{ {
int i; int i;
VideoResetPacket(); // terminate work VideoResetPacket(MyVideoStream); // terminate work
VideoClearBuffers = 1; MyVideoStream->ClearBuffers = 1;
AudioFlushBuffers(); AudioFlushBuffers();
//NewAudioStream = 1; //NewAudioStream = 1;
// FIXME: audio avcodec_flush_buffers, video is done by VideoClearBuffers // FIXME: audio avcodec_flush_buffers, video is done by VideoClearBuffers
// wait for empty buffers // wait for empty buffers
// FIXME: without softstart sync VideoDecode isn't called. // FIXME: without softstart sync VideoDecode isn't called.
for (i = 0; VideoClearBuffers && i < 20; ++i) { for (i = 0; MyVideoStream->ClearBuffers && i < 20; ++i) {
usleep(1 * 1000); usleep(1 * 1000);
} }
Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i, Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i,
VideoGetBuffers()); VideoGetBuffers(MyVideoStream));
} }
/** /**
@ -2128,6 +2205,7 @@ void Play(void)
void Freeze(void) void Freeze(void)
{ {
StreamFreezed = 1; StreamFreezed = 1;
MyVideoStream->Freezed = 1;
AudioPause(); AudioPause();
} }
@ -2163,14 +2241,14 @@ void StillPicture(const uint8_t * data, int size)
#ifdef STILL_DEBUG #ifdef STILL_DEBUG
InStillPicture = 1; InStillPicture = 1;
#endif #endif
VideoSetTrickSpeed(MyHwDecoder, 1); VideoSetTrickSpeed(MyVideoStream->HwDecoder, 1);
VideoResetPacket(); VideoResetPacket(MyVideoStream);
old_video_hardware_decoder = VideoHardwareDecoder; old_video_hardware_decoder = VideoHardwareDecoder;
// enable/disable hardware decoder for still picture // enable/disable hardware decoder for still picture
VideoHardwareDecoder = ConfigStillDecoder; VideoHardwareDecoder = ConfigStillDecoder;
VideoNextPacket(CODEC_ID_NONE); // close last stream VideoNextPacket(MyVideoStream, CODEC_ID_NONE); // close last stream
if (VideoCodecID == CODEC_ID_NONE) { if (MyVideoStream->CodecID == CODEC_ID_NONE) {
// FIXME: should detect codec, see PlayVideo // FIXME: should detect codec, see PlayVideo
Error(_("[softhddev] no codec known for still picture\n")); Error(_("[softhddev] no codec known for still picture\n"));
} }
@ -2179,7 +2257,8 @@ void StillPicture(const uint8_t * data, int size)
#ifdef STILL_DEBUG #ifdef STILL_DEBUG
fprintf(stderr, "still-picture\n"); fprintf(stderr, "still-picture\n");
#endif #endif
for (i = 0; i < (VideoCodecID == CODEC_ID_MPEG2VIDEO ? 4 : 4); ++i) { for (i = 0; i < (MyVideoStream->CodecID == CODEC_ID_MPEG2VIDEO ? 4 : 4);
++i) {
const uint8_t *split; const uint8_t *split;
int n; int n;
@ -2202,47 +2281,49 @@ void StillPicture(const uint8_t * data, int size)
if (!len || len + 6 > n) { if (!len || len + 6 > n) {
if ((split[3] & 0xF0) == 0xE0) { if ((split[3] & 0xF0) == 0xE0) {
// video only // video only
while (!PlayVideo(split, n)) { // feed remaining bytes while (!PlayVideo3(MyVideoStream, split, n)) { // feed remaining bytes
} }
} }
break; break;
} }
if ((split[3] & 0xF0) == 0xE0) { if ((split[3] & 0xF0) == 0xE0) {
// video only // video only
while (!PlayVideo(split, len + 6)) { // feed it while (!PlayVideo3(MyVideoStream, split, len + 6)) { // feed it
} }
} }
split += 6 + len; split += 6 + len;
n -= 6 + len; n -= 6 + len;
} while (n > 6); } while (n > 6);
VideoNextPacket(VideoCodecID); // terminate last packet VideoNextPacket(MyVideoStream, MyVideoStream->CodecID); // terminate last packet
} else { // ES packet } else { // ES packet
if (VideoCodecID != CODEC_ID_MPEG2VIDEO) { if (MyVideoStream->CodecID != CODEC_ID_MPEG2VIDEO) {
VideoNextPacket(CODEC_ID_NONE); // close last stream VideoNextPacket(MyVideoStream, CODEC_ID_NONE); // close last stream
VideoCodecID = CODEC_ID_MPEG2VIDEO; MyVideoStream->CodecID = CODEC_ID_MPEG2VIDEO;
} }
VideoEnqueue(AV_NOPTS_VALUE, data, size); VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, data, size);
} }
if (VideoCodecID == CODEC_ID_H264) { if (MyVideoStream->CodecID == CODEC_ID_H264) {
VideoEnqueue(AV_NOPTS_VALUE, seq_end_h264, sizeof(seq_end_h264)); VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, seq_end_h264,
sizeof(seq_end_h264));
} else { } else {
VideoEnqueue(AV_NOPTS_VALUE, seq_end_mpeg, sizeof(seq_end_mpeg)); VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, seq_end_mpeg,
sizeof(seq_end_mpeg));
} }
VideoNextPacket(VideoCodecID); // terminate last packet VideoNextPacket(MyVideoStream, MyVideoStream->CodecID); // terminate last packet
} }
// wait for empty buffers // wait for empty buffers
for (i = 0; VideoGetBuffers() && i < 30; ++i) { for (i = 0; VideoGetBuffers(MyVideoStream) && i < 30; ++i) {
usleep(10 * 1000); usleep(10 * 1000);
} }
Debug(3, "[softhddev]%s: buffers %d %dms\n", __FUNCTION__, Debug(3, "[softhddev]%s: buffers %d %dms\n", __FUNCTION__,
VideoGetBuffers(), i * 10); VideoGetBuffers(MyVideoStream), i * 10);
#ifdef STILL_DEBUG #ifdef STILL_DEBUG
InStillPicture = 0; InStillPicture = 0;
#endif #endif
VideoNextPacket(CODEC_ID_NONE); // close last stream VideoNextPacket(MyVideoStream, CODEC_ID_NONE); // close last stream
VideoSetTrickSpeed(MyHwDecoder, 0); VideoSetTrickSpeed(MyVideoStream->HwDecoder, 0);
VideoHardwareDecoder = old_video_hardware_decoder; VideoHardwareDecoder = old_video_hardware_decoder;
} }
@ -2262,26 +2343,15 @@ void StillPicture(const uint8_t * data, int size)
int Poll(int timeout) int Poll(int timeout)
{ {
// poll is only called during replay, flush buffers after replay // poll is only called during replay, flush buffers after replay
VideoClearClose = 1; MyVideoStream->ClearClose = 1;
for (;;) { for (;;) {
#if 0
int empty;
int t;
// buffers are too full
empty = atomic_read(&VideoPacketsFilled) < VIDEO_PACKET_MAX * 1 / 4
|| AudioUsedBytes() < AUDIO_MIN_BUFFER_FREE * 2;
if (empty || !timeout) {
return empty;
}
#else
int full; int full;
int t; int t;
int used; int used;
int filled; int filled;
used = AudioUsedBytes(); used = AudioUsedBytes();
filled = atomic_read(&VideoPacketsFilled); filled = atomic_read(&MyVideoStream->PacketsFilled);
// soft limit + hard limit // soft limit + hard limit
full = (used > AUDIO_MIN_BUFFER_FREE && filled > 3) full = (used > AUDIO_MIN_BUFFER_FREE && filled > 3)
|| AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE || AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE
@ -2290,7 +2360,7 @@ int Poll(int timeout)
if (!full || !timeout) { if (!full || !timeout) {
return !full; return !full;
} }
#endif
t = 15; t = 15;
if (timeout < t) { if (timeout < t) {
t = timeout; t = timeout;
@ -2307,11 +2377,11 @@ int Poll(int timeout)
*/ */
int Flush(int timeout) int Flush(int timeout)
{ {
if (atomic_read(&VideoPacketsFilled)) { if (atomic_read(&MyVideoStream->PacketsFilled)) {
if (timeout) { // let display thread work if (timeout) { // let display thread work
usleep(timeout * 1000); usleep(timeout * 1000);
} }
return !atomic_read(&VideoPacketsFilled); return !atomic_read(&MyVideoStream->PacketsFilled);
} }
return 1; return 1;
} }
@ -2384,7 +2454,7 @@ const char *CommandLineHelp(void)
" -d display\tdisplay of x11 server (fe. :0.0)\n" " -d display\tdisplay of x11 server (fe. :0.0)\n"
" -f\t\tstart with fullscreen window (only with window manager)\n" " -f\t\tstart with fullscreen window (only with window manager)\n"
" -g geometry\tx11 window geometry wxh+x+y\n" " -g geometry\tx11 window geometry wxh+x+y\n"
" -v device\tvideo device (va-api, vdpau, noop)\n" " -v device\tvideo driver device (va-api, vdpau, noop)\n"
" -s\t\tstart in suspended mode\n" " -x\t\tstart x11 server\n" " -s\t\tstart in suspended mode\n" " -x\t\tstart x11 server\n"
" -X args\tX11 server arguments (f.e. -nocursor)\n" " -X args\tX11 server arguments (f.e. -nocursor)\n"
" -w workaround\tenable/disable workarounds\n" " -w workaround\tenable/disable workarounds\n"
@ -2693,7 +2763,7 @@ int Start(void)
StartVideo(); StartVideo();
} }
} else { } else {
SkipVideo = 1; MyVideoStream->SkipStream = 1;
SkipAudio = 1; SkipAudio = 1;
} }
pthread_mutex_init(&SuspendLockMutex, NULL); pthread_mutex_init(&SuspendLockMutex, NULL);
@ -2755,14 +2825,14 @@ void MainThreadHook(void)
void Suspend(int video, int audio, int dox11) void Suspend(int video, int audio, int dox11)
{ {
pthread_mutex_lock(&SuspendLockMutex); pthread_mutex_lock(&SuspendLockMutex);
if (SkipVideo && SkipAudio) { // already suspended if (MyVideoStream->SkipStream && SkipAudio) { // already suspended
pthread_mutex_unlock(&SuspendLockMutex); pthread_mutex_unlock(&SuspendLockMutex);
return; return;
} }
Debug(3, "[softhddev]%s:\n", __FUNCTION__); Debug(3, "[softhddev]%s:\n", __FUNCTION__);
SkipVideo = 1; MyVideoStream->SkipStream = 1;
SkipAudio = 1; SkipAudio = 1;
if (audio) { if (audio) {
@ -2791,7 +2861,7 @@ void Suspend(int video, int audio, int dox11)
*/ */
void Resume(void) void Resume(void)
{ {
if (!SkipVideo && !SkipAudio) { // we are not suspended if (!MyVideoStream->SkipStream && !SkipAudio) { // we are not suspended
return; return;
} }
@ -2800,7 +2870,7 @@ void Resume(void)
pthread_mutex_lock(&SuspendLockMutex); pthread_mutex_lock(&SuspendLockMutex);
// FIXME: start x11 // FIXME: start x11
if (!MyHwDecoder) { // video not running if (!MyVideoStream->HwDecoder) { // video not running
StartVideo(); StartVideo();
} }
if (!MyAudioDecoder) { // audio not running if (!MyAudioDecoder) { // audio not running
@ -2812,7 +2882,9 @@ void Resume(void)
AudioChannelID = -1; AudioChannelID = -1;
} }
SkipVideo = 0; if (MyVideoStream->Decoder) {
MyVideoStream->SkipStream = 0;
}
SkipAudio = 0; SkipAudio = 0;
pthread_mutex_unlock(&SuspendLockMutex); pthread_mutex_unlock(&SuspendLockMutex);
@ -2832,8 +2904,9 @@ void GetStats(int *missed, int *duped, int *dropped, int *counter)
*duped = 0; *duped = 0;
*dropped = 0; *dropped = 0;
*counter = 0; *counter = 0;
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
VideoGetStats(MyHwDecoder, missed, duped, dropped, counter); VideoGetStats(MyVideoStream->HwDecoder, missed, duped, dropped,
counter);
} }
} }
@ -2847,7 +2920,7 @@ void GetStats(int *missed, int *duped, int *dropped, int *counter)
*/ */
void ScaleVideo(int x, int y, int width, int height) void ScaleVideo(int x, int y, int width, int height)
{ {
if (MyHwDecoder) { if (MyVideoStream->HwDecoder) {
VideoSetOutputPosition(MyHwDecoder, x, y, width, height); VideoSetOutputPosition(MyVideoStream->HwDecoder, x, y, width, height);
} }
} }

softhddev.h

@ -44,6 +44,8 @@ extern "C"
/// C plugin play video packet /// C plugin play video packet
extern int PlayVideo(const uint8_t *, int); extern int PlayVideo(const uint8_t *, int);
/// C plugin play video packet (next version)
extern int PlayVideo2(const uint8_t *, int);
/// C plugin play TS video packet /// C plugin play TS video packet
extern void PlayTsVideo(const uint8_t *, int); extern void PlayTsVideo(const uint8_t *, int);
/// C plugin grab an image /// C plugin grab an image

softhddevice.cpp

@ -50,7 +50,7 @@ extern "C"
/// vdr-plugin version number. /// vdr-plugin version number.
/// Makefile extracts the version number for generating the file name /// Makefile extracts the version number for generating the file name
/// for the distribution archive. /// for the distribution archive.
static const char *const VERSION = "0.5.3" static const char *const VERSION = "0.6.0"
#ifdef GIT_REV #ifdef GIT_REV
"-GIT" GIT_REV "-GIT" GIT_REV
#endif #endif
@ -1178,6 +1178,201 @@ cSoftHdControl::~cSoftHdControl()
dsyslog("[softhddev]%s: dummy player stopped\n", __FUNCTION__); dsyslog("[softhddev]%s: dummy player stopped\n", __FUNCTION__);
} }
//////////////////////////////////////////////////////////////////////////////
// PIP
//////////////////////////////////////////////////////////////////////////////
#ifdef USE_PIP
static int OsdPipTest; ///< OSD pip test flag
//////////////////////////////////////////////////////////////////////////////
// cReceiver
//////////////////////////////////////////////////////////////////////////////
#include <vdr/receiver.h>
/**
** Receiver class for PIP mode.
*/
class cSoftReceiver:public cReceiver
{
protected:
virtual void Activate(bool);
virtual void Receive(uchar *, int);
public:
cSoftReceiver(const cChannel *); ///< receiver constructor
virtual ~ cSoftReceiver(); ///< receiver destructor
};
/**
** Receiver constructor.
**
** @param channel channel to receive.
*/
cSoftReceiver::cSoftReceiver(const cChannel * channel):cReceiver(channel)
{
fprintf(stderr, "pip: v-pid: %04x\n", channel->Vpid());
SetPids(NULL); // clear all pids, we want video only
AddPid(channel->Vpid());
}
/**
** Receiver destructor.
*/
cSoftReceiver::~cSoftReceiver()
{
}
/**
** Called before the receiver gets attached or detached.
**
** @param on flag attached, detached
*/
void cSoftReceiver::Activate(bool on)
{
fprintf(stderr, "pip: activate %d\n", on);
OsdPipTest = on;
}
///
/// Parse packetized elementary stream.
///
/// @param data payload data of transport stream
/// @param size number of payload data bytes
/// @param is_start flag, start of pes packet
///
static void PipPesParse(const uint8_t * data, int size, int is_start)
{
static uint8_t *pes_buf;
static int pes_size;
static int pes_index;
// FIXME: quick&dirty
if (!pes_buf) {
pes_size = 500 * 1024 * 1024;
pes_buf = (uint8_t *) malloc(pes_size);
pes_index = 0;
}
if (is_start) { // start of pes packet
if (pes_index) {
fprintf(stderr, "pip: pes packet %8d %02x%02x\n", pes_index,
pes_buf[2], pes_buf[3]);
if (pes_buf[0] || pes_buf[1] || pes_buf[2] != 0x01) {
fprintf(stderr, "pip: invalid pes packet %d\n", pes_index);
} else {
PlayVideo2(pes_buf, pes_index);
// FIXME: buffer full: pes packet is dropped
}
pes_index = 0;
}
}
if (pes_index + size > pes_size) {
fprintf(stderr, "pip: pes buffer too small\n");
// FIXME: error state
return;
}
memcpy(pes_buf + pes_index, data, size);
pes_index += size;
}
/// Transport stream packet size
#define TS_PACKET_SIZE 188
/// Transport stream packet sync byte
#define TS_PACKET_SYNC 0x47
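Receive() below walks raw 188 byte TS packets and skips any adaptation field before handing the payload to PipPesParse(). For reference, the offset it computes follows directly from the standard TS header layout; the TsPayloadOffset() helper below is an isolated illustration of the same calculation, not part of this commit:

/**
**	Illustration of the MPEG-TS payload offset computed in Receive():
**	byte 3 bits 0x30 hold the adaptation_field_control, byte 4 holds the
**	adaptation field length when one is present.
**
**	@returns payload offset inside the 188 byte packet, -1 if no payload.
*/
static inline int TsPayloadOffset(const uint8_t * p)
{
    switch (p[3] & 0x30) {
	case 0x10:			// payload only
	    return 4;
	case 0x30:			// adaptation field followed by payload
	    return (5 + p[4] < TS_PACKET_SIZE) ? 5 + p[4] : -1;
	default:			// reserved or adaptation field only
	    return -1;
    }
}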
/**
** Receive TS packet from device.
**
** @param data ts packet
** @param size size (#TS_PACKET_SIZE=188) of ts packet
*/
void cSoftReceiver::Receive(uchar * data, int size)
{
static int x;
const uint8_t *p;
if (!x) {
fprintf(stderr, "pip: receive %p(%d)\n", data, size);
x++;
}
p = data;
while (size >= TS_PACKET_SIZE) {
int payload;
if (p[0] != TS_PACKET_SYNC) {
esyslog(tr("tsdemux: transport stream out of sync\n"));
// FIXME: kill all buffers
return;
}
if (p[1] & 0x80) { // error indicator
dsyslog("tsdemux: transport error\n");
// FIXME: kill all buffers
goto next_packet;
}
if (0) {
int pid;
pid = (p[1] & 0x1F) << 8 | p[2];
fprintf(stderr, "tsdemux: PID: %#04x%s%s\n", pid,
p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
}
// skip adaptation field
switch (p[3] & 0x30) { // adaption field
case 0x00: // reserved
case 0x20: // adaptation field only
default:
goto next_packet;
case 0x10: // only payload
payload = 4;
break;
case 0x30: // skip adapation field
payload = 5 + p[4];
// illegal length, ignore packet
if (payload >= TS_PACKET_SIZE) {
dsyslog("tsdemux: illegal adaption field length\n");
goto next_packet;
}
break;
}
PipPesParse(p + payload, TS_PACKET_SIZE - payload, p[1] & 0x40);
next_packet:
p += TS_PACKET_SIZE;
size -= TS_PACKET_SIZE;
}
}
//////////////////////////////////////////////////////////////////////////////
/**
** Prepare new PIP.
*/
static void NewPip(void)
{
int channel_nr;
const cChannel *channel;
cDevice *device;
cSoftReceiver *receiver;
if ((channel_nr = cDevice::CurrentChannel())
&& (channel = Channels.GetByNumber(cDevice::CurrentChannel()))
&& (device = cDevice::GetDevice(channel, 1, false))) {
fprintf(stderr, "pip: %d %p %p\n", channel_nr, channel, device);
device->SwitchChannel(channel, false);
receiver = new cSoftReceiver(channel);
device->AttachReceiver(receiver);
fprintf(stderr, "pip: attached\n");
}
}
#endif
////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////
// cOsdMenu // cOsdMenu
////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////
@ -1213,12 +1408,16 @@ void cSoftHdMenu::Create(void)
SetHasHotkeys(); SetHasHotkeys();
Add(new cOsdItem(hk(tr("Suspend SoftHdDevice")), osUser1)); Add(new cOsdItem(hk(tr("Suspend SoftHdDevice")), osUser1));
#ifdef USE_PIP
Add(new cOsdItem(hk(tr("PIP")), osUser2));
#endif
Add(new cOsdItem(NULL, osUnknown, false)); Add(new cOsdItem(NULL, osUnknown, false));
Add(new cOsdItem(NULL, osUnknown, false)); Add(new cOsdItem(NULL, osUnknown, false));
GetStats(&missed, &duped, &dropped, &counter); GetStats(&missed, &duped, &dropped, &counter);
Add(new cOsdItem(cString:: Add(new
sprintf(tr(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"), cOsdItem(cString::sprintf(tr
missed, duped, dropped, counter), osUnknown, false)); (" Frames missed(%d) duped(%d) dropped(%d) total(%d)"), missed,
duped, dropped, counter), osUnknown, false));
SetCurrent(Get(current)); // restore selected menu entry SetCurrent(Get(current)); // restore selected menu entry
Display(); // display build menu Display(); // display build menu
@ -1408,6 +1607,11 @@ eOSState cSoftHdMenu::ProcessKey(eKeys key)
} }
} }
return osEnd; return osEnd;
#ifdef USE_PIP
case osUser2:
NewPip();
return osEnd;
#endif
default: default:
Create(); Create();
break; break;
@ -1707,8 +1911,8 @@ bool cSoftHdDevice::Flush(int timeout_ms)
** Sets the video display format to the given one (only useful if this ** Sets the video display format to the given one (only useful if this
** device has an MPEG decoder). ** device has an MPEG decoder).
*/ */
void cSoftHdDevice:: void cSoftHdDevice:: SetVideoDisplayFormat(eVideoDisplayFormat
SetVideoDisplayFormat(eVideoDisplayFormat video_display_format) video_display_format)
{ {
dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_display_format); dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_display_format);
@ -1826,6 +2030,11 @@ void cSoftHdDevice::SetVolumeDevice(int volume)
int cSoftHdDevice::PlayVideo(const uchar * data, int length) int cSoftHdDevice::PlayVideo(const uchar * data, int length)
{ {
//dsyslog("[softhddev]%s: %p %d\n", __FUNCTION__, data, length); //dsyslog("[softhddev]%s: %p %d\n", __FUNCTION__, data, length);
#ifdef USE_PIP
if (OsdPipTest) {
return length;
}
#endif
return::PlayVideo(data, length); return::PlayVideo(data, length);
} }

video.c

@ -226,7 +226,7 @@ typedef struct _video_module_
char Enabled; ///< flag output module enabled char Enabled; ///< flag output module enabled
/// allocate new video hw decoder /// allocate new video hw decoder
VideoHwDecoder *(*const NewHwDecoder)(void); VideoHwDecoder *(*const NewHwDecoder)(VideoStream *);
void (*const DelHwDecoder) (VideoHwDecoder *); void (*const DelHwDecoder) (VideoHwDecoder *);
unsigned (*const GetSurface) (VideoHwDecoder *); unsigned (*const GetSurface) (VideoHwDecoder *);
void (*const ReleaseSurface) (VideoHwDecoder *, unsigned); void (*const ReleaseSurface) (VideoHwDecoder *, unsigned);
@ -279,7 +279,7 @@ typedef struct _video_module_
char VideoIgnoreRepeatPict; ///< disable repeat pict warning char VideoIgnoreRepeatPict; ///< disable repeat pict warning
static const char *VideoDevice; ///< video output device static const char *VideoDriverName; ///< video output device
static Display *XlibDisplay; ///< Xlib X11 display static Display *XlibDisplay; ///< Xlib X11 display
static xcb_connection_t *Connection; ///< xcb connection static xcb_connection_t *Connection; ///< xcb connection
static xcb_colormap_t VideoColormap; ///< video colormap static xcb_colormap_t VideoColormap; ///< video colormap
@ -1380,6 +1380,7 @@ struct _vaapi_decoder_
int TrickSpeed; ///< current trick speed int TrickSpeed; ///< current trick speed
int TrickCounter; ///< current trick speed counter int TrickCounter; ///< current trick speed counter
struct timespec FrameTime; ///< time of last display struct timespec FrameTime; ///< time of last display
VideoStream *Stream; ///< video stream
int Closing; ///< flag about closing current stream int Closing; ///< flag about closing current stream
int64_t PTS; ///< video PTS clock int64_t PTS; ///< video PTS clock
@ -1762,7 +1763,7 @@ static void VaapiInitSurfaceFlags(VaapiDecoder * decoder)
/// ///
/// @returns a new prepared VA-API hardware decoder. /// @returns a new prepared VA-API hardware decoder.
/// ///
static VaapiDecoder *VaapiNewHwDecoder(void) static VaapiDecoder *VaapiNewHwDecoder(VideoStream * stream)
{ {
VaapiDecoder *decoder; VaapiDecoder *decoder;
int i; int i;
@ -1826,6 +1827,7 @@ static VaapiDecoder *VaapiNewHwDecoder(void)
decoder->OutputWidth = VideoWindowWidth; decoder->OutputWidth = VideoWindowWidth;
decoder->OutputHeight = VideoWindowHeight; decoder->OutputHeight = VideoWindowHeight;
decoder->Stream = stream;
decoder->Closing = -300 - 1; decoder->Closing = -300 - 1;
decoder->PTS = AV_NOPTS_VALUE; decoder->PTS = AV_NOPTS_VALUE;
@ -4394,7 +4396,7 @@ static void VaapiAdvanceDecoderFrame(VaapiDecoder * decoder)
// FIXME: don't warn after stream start, don't warn during pause // FIXME: don't warn after stream start, don't warn during pause
Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"), Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"),
decoder->FramesDuped, decoder->FrameCounter, decoder->FramesDuped, decoder->FrameCounter,
VideoGetBuffers()); VideoGetBuffers(decoder->Stream));
return; return;
} }
// wait for rendering finished // wait for rendering finished
@ -4681,7 +4683,7 @@ static void VaapiSyncDecoder(VaapiDecoder * decoder)
VaapiMessage(1, VaapiMessage(1,
_("video: decoder buffer empty, " _("video: decoder buffer empty, "
"duping frame (%d/%d) %d v-buf\n"), decoder->FramesDuped, "duping frame (%d/%d) %d v-buf\n"), decoder->FramesDuped,
decoder->FrameCounter, VideoGetBuffers()); decoder->FrameCounter, VideoGetBuffers(decoder->Stream));
if (decoder->Closing < -300) { if (decoder->Closing < -300) {
atomic_set(&decoder->SurfacesFilled, 0); atomic_set(&decoder->SurfacesFilled, 0);
} }
@ -4701,7 +4703,8 @@ static void VaapiSyncDecoder(VaapiDecoder * decoder)
Timestamp2String(video_clock), Timestamp2String(video_clock),
abs((video_clock - audio_clock) / 90) < abs((video_clock - audio_clock) / 90) <
8888 ? ((video_clock - audio_clock) / 90) : 8888, 8888 ? ((video_clock - audio_clock) / 90) : 8888,
AudioGetDelay() / 90, (int)VideoDeltaPTS / 90, VideoGetBuffers(), AudioGetDelay() / 90, (int)VideoDeltaPTS / 90,
VideoGetBuffers(decoder->Stream),
(1 + decoder->Interlaced) * atomic_read(&decoder->SurfacesFilled) (1 + decoder->Interlaced) * atomic_read(&decoder->SurfacesFilled)
- decoder->SurfaceField); - decoder->SurfaceField);
if (!(decoder->FramesDisplayed % (5 * 60 * 60))) { if (!(decoder->FramesDisplayed % (5 * 60 * 60))) {
@ -4844,10 +4847,10 @@ static void VaapiDisplayHandlerThread(void)
// FIXME: hot polling // FIXME: hot polling
pthread_mutex_lock(&VideoLockMutex); pthread_mutex_lock(&VideoLockMutex);
// fetch+decode or reopen // fetch+decode or reopen
err = VideoDecodeInput(); err = VideoDecodeInput(decoder->Stream);
pthread_mutex_unlock(&VideoLockMutex); pthread_mutex_unlock(&VideoLockMutex);
} else { } else {
err = VideoPollInput(); err = VideoPollInput(decoder->Stream);
} }
if (err) { if (err) {
// FIXME: sleep on wakeup // FIXME: sleep on wakeup
@ -5111,7 +5114,8 @@ static void VaapiOsdExit(void)
static const VideoModule VaapiModule = { static const VideoModule VaapiModule = {
.Name = "va-api", .Name = "va-api",
.Enabled = 1, .Enabled = 1,
.NewHwDecoder = (VideoHwDecoder * (*const)(void))VaapiNewHwDecoder, .NewHwDecoder =
(VideoHwDecoder * (*const)(VideoStream *)) VaapiNewHwDecoder,
.DelHwDecoder = (void (*const) (VideoHwDecoder *))VaapiDelHwDecoder, .DelHwDecoder = (void (*const) (VideoHwDecoder *))VaapiDelHwDecoder,
.GetSurface = (unsigned (*const) (VideoHwDecoder *))VaapiGetSurface, .GetSurface = (unsigned (*const) (VideoHwDecoder *))VaapiGetSurface,
.ReleaseSurface = .ReleaseSurface =
@ -5211,6 +5215,7 @@ typedef struct _vdpau_decoder_
int TrickSpeed; ///< current trick speed int TrickSpeed; ///< current trick speed
int TrickCounter; ///< current trick speed counter int TrickCounter; ///< current trick speed counter
struct timespec FrameTime; ///< time of last display struct timespec FrameTime; ///< time of last display
VideoStream *Stream; ///< video stream
int Closing; ///< flag about closing current stream int Closing; ///< flag about closing current stream
int64_t PTS; ///< video PTS clock int64_t PTS; ///< video PTS clock
@ -5826,9 +5831,11 @@ static void VdpauMixerCreate(VdpauDecoder * decoder)
/// ///
/// Allocate new VDPAU decoder. /// Allocate new VDPAU decoder.
/// ///
/// @param stream video stream
///
/// @returns a new prepared vdpau hardware decoder. /// @returns a new prepared vdpau hardware decoder.
/// ///
static VdpauDecoder *VdpauNewHwDecoder(void) static VdpauDecoder *VdpauNewHwDecoder(VideoStream * stream)
{ {
VdpauDecoder *decoder; VdpauDecoder *decoder;
int i; int i;
@ -5884,6 +5891,7 @@ static VdpauDecoder *VdpauNewHwDecoder(void)
decoder->OutputWidth = VideoWindowWidth; decoder->OutputWidth = VideoWindowWidth;
decoder->OutputHeight = VideoWindowHeight; decoder->OutputHeight = VideoWindowHeight;
decoder->Stream = stream;
decoder->Closing = -300 - 1; decoder->Closing = -300 - 1;
decoder->PTS = AV_NOPTS_VALUE; decoder->PTS = AV_NOPTS_VALUE;
@ -7830,7 +7838,7 @@ static void VdpauAdvanceDecoderFrame(VdpauDecoder * decoder)
// FIXME: don't warn after stream start, don't warn during pause // FIXME: don't warn after stream start, don't warn during pause
Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"), Error(_("video: display buffer empty, duping frame (%d/%d) %d\n"),
decoder->FramesDuped, decoder->FrameCounter, decoder->FramesDuped, decoder->FrameCounter,
VideoGetBuffers()); VideoGetBuffers(decoder->Stream));
return; return;
} }
decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX; decoder->SurfaceRead = (decoder->SurfaceRead + 1) % VIDEO_SURFACES_MAX;
@ -8094,7 +8102,7 @@ static void VdpauSyncDecoder(VdpauDecoder * decoder)
VdpauMessage(1, VdpauMessage(1,
_("video: decoder buffer empty, " _("video: decoder buffer empty, "
"duping frame (%d/%d) %d v-buf\n"), decoder->FramesDuped, "duping frame (%d/%d) %d v-buf\n"), decoder->FramesDuped,
decoder->FrameCounter, VideoGetBuffers()); decoder->FrameCounter, VideoGetBuffers(decoder->Stream));
if (decoder->Closing < -300) { if (decoder->Closing < -300) {
atomic_set(&decoder->SurfacesFilled, 0); atomic_set(&decoder->SurfacesFilled, 0);
} }
@ -8114,7 +8122,8 @@ static void VdpauSyncDecoder(VdpauDecoder * decoder)
Timestamp2String(video_clock), Timestamp2String(video_clock),
abs((video_clock - audio_clock) / 90) < abs((video_clock - audio_clock) / 90) <
8888 ? ((video_clock - audio_clock) / 90) : 8888, 8888 ? ((video_clock - audio_clock) / 90) : 8888,
AudioGetDelay() / 90, (int)VideoDeltaPTS / 90, VideoGetBuffers(), AudioGetDelay() / 90, (int)VideoDeltaPTS / 90,
VideoGetBuffers(decoder->Stream),
(1 + decoder->Interlaced) * atomic_read(&decoder->SurfacesFilled) (1 + decoder->Interlaced) * atomic_read(&decoder->SurfacesFilled)
- decoder->SurfaceField); - decoder->SurfaceField);
if (!(decoder->FramesDisplayed % (5 * 60 * 60))) { if (!(decoder->FramesDisplayed % (5 * 60 * 60))) {
@ -8335,10 +8344,10 @@ static void VdpauDisplayHandlerThread(void)
// FIXME: hot polling // FIXME: hot polling
pthread_mutex_lock(&VideoLockMutex); pthread_mutex_lock(&VideoLockMutex);
// fetch+decode or reopen // fetch+decode or reopen
err = VideoDecodeInput(); err = VideoDecodeInput(decoder->Stream);
pthread_mutex_unlock(&VideoLockMutex); pthread_mutex_unlock(&VideoLockMutex);
} else { } else {
err = VideoPollInput(); err = VideoPollInput(decoder->Stream);
} }
if (err) { if (err) {
// FIXME: sleep on wakeup // FIXME: sleep on wakeup
@ -8638,7 +8647,8 @@ static void VdpauOsdExit(void)
static const VideoModule VdpauModule = { static const VideoModule VdpauModule = {
.Name = "vdpau", .Name = "vdpau",
.Enabled = 1, .Enabled = 1,
.NewHwDecoder = (VideoHwDecoder * (*const)(void))VdpauNewHwDecoder, .NewHwDecoder =
(VideoHwDecoder * (*const)(VideoStream *)) VdpauNewHwDecoder,
.DelHwDecoder = (void (*const) (VideoHwDecoder *))VdpauDelHwDecoder, .DelHwDecoder = (void (*const) (VideoHwDecoder *))VdpauDelHwDecoder,
.GetSurface = (unsigned (*const) (VideoHwDecoder *))VdpauGetSurface, .GetSurface = (unsigned (*const) (VideoHwDecoder *))VdpauGetSurface,
.ReleaseSurface = .ReleaseSurface =
@ -8673,9 +8683,12 @@ static const VideoModule VdpauModule = {
/// ///
/// Allocate new noop decoder. /// Allocate new noop decoder.
/// ///
/// @param stream video stream
///
/// @returns always NULL. /// @returns always NULL.
/// ///
static VideoHwDecoder *NoopNewHwDecoder(void) static VideoHwDecoder *NoopNewHwDecoder(
__attribute__ ((unused)) VideoStream * stream)
{ {
return NULL; return NULL;
} }
@ -9384,11 +9397,13 @@ struct _video_hw_decoder_
/// ///
/// Allocate new video hw decoder. /// Allocate new video hw decoder.
/// ///
/// @param stream video stream
///
/// @returns a new initialized video hardware decoder. /// @returns a new initialized video hardware decoder.
/// ///
VideoHwDecoder *VideoNewHwDecoder(void) VideoHwDecoder *VideoNewHwDecoder(VideoStream * stream)
{ {
return VideoUsedModule->NewHwDecoder(); return VideoUsedModule->NewHwDecoder(stream);
} }
/// ///
@ -10107,7 +10122,7 @@ static void VideoCreateWindow(xcb_window_t parent, xcb_visualid_t visual,
/// ///
void VideoSetDevice(const char *device) void VideoSetDevice(const char *device)
{ {
VideoDevice = device; VideoDriverName = device;
} }
/// ///
@ -10696,15 +10711,16 @@ void VideoInit(const char *display_name)
for (i = 0; i < (int)(sizeof(VideoModules) / sizeof(*VideoModules)); ++i) { for (i = 0; i < (int)(sizeof(VideoModules) / sizeof(*VideoModules)); ++i) {
// FIXME: support list of drivers and include display name // FIXME: support list of drivers and include display name
// use user device or first working enabled device driver // use user device or first working enabled device driver
if ((VideoDevice && !strcasecmp(VideoDevice, VideoModules[i]->Name)) if ((VideoDriverName
|| (!VideoDevice && VideoModules[i]->Enabled)) { && !strcasecmp(VideoDriverName, VideoModules[i]->Name))
|| (!VideoDriverName && VideoModules[i]->Enabled)) {
if (VideoModules[i]->Init(display_name)) { if (VideoModules[i]->Init(display_name)) {
VideoUsedModule = VideoModules[i]; VideoUsedModule = VideoModules[i];
goto found; goto found;
} }
} }
} }
Error(_("video: '%s' output module isn't supported\n"), VideoDevice); Error(_("video: '%s' output module isn't supported\n"), VideoDriverName);
VideoUsedModule = &NoopModule; VideoUsedModule = &NoopModule;
found: found:
@ -10822,7 +10838,7 @@ void FeedKeyPress( __attribute__ ((unused))
{ {
} }
int VideoDecodeInput(void) int VideoDecodeInput( __attribute__ ((unused)) VideoStream * stream)
{ {
return -1; return -1;
} }
@ -10923,7 +10939,7 @@ int main(int argc, char *const argv[])
// //
VideoInit(NULL); VideoInit(NULL);
VideoOsdInit(); VideoOsdInit();
video_hw_decoder = VideoNewHwDecoder(); video_hw_decoder = VideoNewHwDecoder(NULL);
start_tick = GetMsTicks(); start_tick = GetMsTicks();
n = 0; n = 0;
for (;;) { for (;;) {

video.h

@ -30,6 +30,9 @@
/// Video hardware decoder typedef /// Video hardware decoder typedef
typedef struct _video_hw_decoder_ VideoHwDecoder; typedef struct _video_hw_decoder_ VideoHwDecoder;
/// Video output stream typedef
typedef struct __video_stream__ VideoStream;
//---------------------------------------------------------------------------- //----------------------------------------------------------------------------
// Variables // Variables
//---------------------------------------------------------------------------- //----------------------------------------------------------------------------
@ -43,7 +46,7 @@ extern int VideoAudioDelay; ///< audio/video delay
//---------------------------------------------------------------------------- //----------------------------------------------------------------------------
/// Allocate new video hardware decoder. /// Allocate new video hardware decoder.
extern VideoHwDecoder *VideoNewHwDecoder(void); extern VideoHwDecoder *VideoNewHwDecoder(VideoStream *);
/// Deallocate video hardware decoder. /// Deallocate video hardware decoder.
extern void VideoDelHwDecoder(VideoHwDecoder *); extern void VideoDelHwDecoder(VideoHwDecoder *);
@ -208,8 +211,13 @@ extern void VideoOsdExit(void); ///< Cleanup osd.
extern void VideoInit(const char *); ///< Setup video module. extern void VideoInit(const char *); ///< Setup video module.
extern void VideoExit(void); ///< Cleanup and exit video module. extern void VideoExit(void); ///< Cleanup and exit video module.
extern int VideoPollInput(void); ///< Poll video input buffers. /// Poll video input buffers.
extern int VideoDecodeInput(void); ///< Decode video input buffers. extern int VideoPollInput(VideoStream *);
extern int VideoGetBuffers(void); ///< Get number of input buffers.
/// Decode video input buffers.
extern int VideoDecodeInput(VideoStream *);
/// Get number of input buffers.
extern int VideoGetBuffers(const VideoStream *);
/// @} /// @}
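Taken together, the header changes make the input side of the API explicitly per stream. A minimal consumer loop against the new signatures, roughly what the VA-API/VDPAU display handler threads above do; the StreamDecodeLoop() name is made up for this sketch and assumes the usual includes of the surrounding sources:

/**
**	Sketch: drain one video stream with the new per-stream entry points.
*/
static void StreamDecodeLoop(VideoStream * stream)
{
    for (;;) {
	switch (VideoDecodeInput(stream)) {
	    case 0:			// packet decoded, try the next one
		break;
	    case 1:			// stream paused or buffers flushed
	    case -1:			// packet ring buffer empty
	    default:
		usleep(1 * 1000);	// nothing to do, back off briefly
	}
    }
}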