mirror of https://github.com/jojo61/vdr-plugin-softhdcuvid.git synced 2023-10-10 13:37:41 +02:00

Fix remaining indentation problems.

Dirk Nehring 2019-10-28 21:43:37 +01:00
parent 43fa0b8929
commit 9ebdd3baae
15 changed files with 784 additions and 790 deletions

audio.c (26 lines changed)

@ -87,7 +87,7 @@
#endif
#include <pthread.h>
#ifndef HAVE_PTHREAD_NAME
/// only available with newer glibc
/// only available with newer glibc
#define pthread_setname_np(thread, name)
#endif
#endif
@ -132,7 +132,7 @@ char AudioAlsaCloseOpenDelay; ///< enable alsa close/open delay fix
static const char *AudioModuleName; ///< which audio module to use
/// Selected audio module.
/// Selected audio module.
static const AudioModule *AudioUsedModule = &NoopModule;
static const char *AudioPCMDevice; ///< PCM device name
static const char *AudioPassthroughDevice; ///< Passthrough device name
@ -174,30 +174,30 @@ static int AudioVolume; ///< current volume (0 .. 1000)
extern int VideoAudioDelay; ///< import audio/video delay
/// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8)
/// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8)
static const unsigned AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000;
static int AudioChannelsInHw[9]; ///< table which channels are supported
enum _audio_rates
{ ///< sample rates enumeration
// HW: 32000 44100 48000 88200 96000 176400 192000
//Audio32000, ///< 32.0Khz
// Audio32000, ///< 32.0Khz
Audio44100, ///< 44.1Khz
Audio48000, ///< 48.0Khz
//Audio88200, ///< 88.2Khz
//Audio96000, ///< 96.0Khz
//Audio176400, ///< 176.4Khz
// Audio88200, ///< 88.2Khz
// Audio96000, ///< 96.0Khz
// Audio176400, ///< 176.4Khz
Audio192000, ///< 192.0Khz
AudioRatesMax ///< max index
};
/// table which rates are supported
/// table which rates are supported
static int AudioRatesInHw[AudioRatesMax];
/// input to hardware channel matrix
/// input to hardware channel matrix
static int AudioChannelMatrix[AudioRatesMax][9];
/// rates tables (must be sorted by frequency)
/// rates tables (must be sorted by frequency)
static const unsigned AudioRatesTable[AudioRatesMax] = {
44100, 48000, 192000
};
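
The hunk above defines AudioRingBufferSize as 3 * 5 * 7 * 8 * 2 * 1000 and documents it as roughly two seconds of 8-channel 16-bit audio. The following standalone snippet is only an illustrative sanity check of that figure, assuming the 48000 Hz entry from AudioRatesTable; it is not part of the plugin.

#include <stdio.h>

/* Sanity check for AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000;
 * assumes 48 kHz, 8 channels, 16-bit (2-byte) samples, per the
 * "~2s 8ch 16bit" comment and the 48000 entry in AudioRatesTable. */
int main(void)
{
    const unsigned ring_bytes = 3 * 5 * 7 * 8 * 2 * 1000;  /* 1,680,000 bytes */
    const unsigned bytes_per_second = 48000 * 8 * 2;        /* 768,000 bytes/s */

    printf("ring buffer holds ~%.2f seconds of audio\n",
           (double)ring_bytes / bytes_per_second);           /* prints ~2.19 */
    return 0;
}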
@ -209,7 +209,7 @@ static const unsigned AudioRatesTable[AudioRatesMax] = {
static const int AudioNormSamples = 4096; ///< number of samples
#define AudioNormMaxIndex 128 ///< number of average values
/// average of n last sample blocks
/// average of n last sample blocks
static uint32_t AudioNormAverage[AudioNormMaxIndex];
static int AudioNormIndex; ///< index into average table
static int AudioNormReady; ///< index counter
@ -1149,7 +1149,7 @@ static int64_t AlsaGetDelay(void)
}
// delay in frames in alsa + kernel buffers
if ((err = snd_pcm_delay(AlsaPCMHandle, &delay)) < 0) {
//Debug(3, "audio/alsa: no hw delay\n");
// Debug(3, "audio/alsa: no hw delay\n");
delay = 0L;
#ifdef DEBUG
} else if (snd_pcm_state(AlsaPCMHandle) != SND_PCM_STATE_RUNNING) {
@ -2285,7 +2285,7 @@ void AudioEnqueue(const void *samples, int count)
// forced start or enough video + audio buffered
// for some exotic channels * 4 too small
if (AudioStartThreshold * 10 < n || (AudioVideoIsReady
// if ((AudioVideoIsReady
// if ((AudioVideoIsReady
&& AudioStartThreshold < n)) {
// restart play-back
// no lock needed, can wakeup next time

codec.c (89 lines changed)

@ -1,4 +1,3 @@
///
/// @file codec.c @brief Codec functions
///
@ -31,15 +30,15 @@
/// many bugs and incompatibility in it. Don't use this shit.
///
/// compile with pass-through support (stable, AC-3, E-AC-3 only)
/// compile with pass-through support (stable, AC-3, E-AC-3 only)
#define USE_PASSTHROUGH
/// compile audio drift correction support (very experimental)
/// compile audio drift correction support (very experimental)
#define USE_AUDIO_DRIFT_CORRECTION
/// compile AC-3 audio drift correction support (very experimental)
/// compile AC-3 audio drift correction support (very experimental)
#define USE_AC3_DRIFT_CORRECTION
/// use ffmpeg libswresample API (autodetected, Makefile)
/// use ffmpeg libswresample API (autodetected, Makefile)
#define noUSE_SWRESAMPLE
/// use libav libavresample API (autodetected, Makefile)
/// use libav libavresample API (autodetected, Makefile)
#define noUSE_AVRESAMPLE
#include <stdio.h>
@ -89,15 +88,15 @@
// Global
//----------------------------------------------------------------------------
///
/// ffmpeg lock mutex
///
/// new ffmpeg dislikes simultaneous open/close
/// this breaks our code, until this is fixed use lock.
///
///
/// ffmpeg lock mutex
///
/// new ffmpeg dislikes simultaneous open/close
/// this breaks our code, until this is fixed use lock.
///
static pthread_mutex_t CodecLockMutex;
/// Flag prefer fast channel switch
/// Flag prefer fast channel switch
char CodecUsePossibleDefectFrames;
AVBufferRef *hw_device_ctx;
@ -155,7 +154,7 @@ static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx, const enu
}
//static void Codec_free_buffer(void *opaque, uint8_t *data);
// static void Codec_free_buffer(void *opaque, uint8_t *data);
/**
** Video buffer management, get buffer for frame.
@ -174,8 +173,8 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
if (!decoder->GetFormatDone) { // get_format missing
enum AVPixelFormat fmts[2];
// fprintf(stderr, "codec: buggy libav, use ffmpeg\n");
// Warning(_("codec: buggy libav, use ffmpeg\n"));
// fprintf(stderr, "codec: buggy libav, use ffmpeg\n");
// Warning(_("codec: buggy libav, use ffmpeg\n"));
fmts[0] = video_ctx->pix_fmt;
fmts[1] = AV_PIX_FMT_NONE;
Codec_get_format(video_ctx, fmts);
@ -183,11 +182,11 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
#if 0
if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt
|| AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt || AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
//Debug(3,"hwaccel get_buffer\n");
// Debug(3,"hwaccel get_buffer\n");
return decoder->hwaccel_get_buffer(video_ctx, frame, flags);
}
#endif
//Debug(3, "codec: fallback to default get_buffer\n");
// Debug(3, "codec: fallback to default get_buffer\n");
return avcodec_default_get_buffer2(video_ctx, frame, flags);
}
@ -368,12 +367,12 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
//decoder->VideoCtx->debug = FF_DEBUG_STARTCODE;
//decoder->VideoCtx->err_recognition |= AV_EF_EXPLODE;
// av_log_set_level(AV_LOG_DEBUG);
// av_log_set_level(AV_LOG_DEBUG);
av_log_set_level(0);
decoder->VideoCtx->get_format = Codec_get_format;
decoder->VideoCtx->get_buffer2 = Codec_get_buffer2;
// decoder->VideoCtx->active_thread_type = 0;
// decoder->VideoCtx->active_thread_type = 0;
decoder->VideoCtx->draw_horiz_band = NULL;
decoder->VideoCtx->hwaccel_context = VideoGetHwAccelContext(decoder->HwDecoder);
@ -403,7 +402,7 @@ void CodecVideoClose(VideoDecoder * video_decoder)
AVFrame *frame;
// FIXME: play buffered data
// av_frame_free(&video_decoder->Frame); // callee does checks
// av_frame_free(&video_decoder->Frame); // callee does checks
Debug(3, "CodecVideoClose\n");
if (video_decoder->VideoCtx) {
@ -533,10 +532,10 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
pkt = avpkt; // use copy
got_frame = 0;
// printf("decode packet %d\n",(GetusTicks()-first_time)/1000000);
// printf("decode packet %d\n",(GetusTicks()-first_time)/1000000);
ret1 = avcodec_send_packet(video_ctx, pkt);
// first_time = GetusTicks();
// first_time = GetusTicks();
if (ret1 >= 0) {
consumed = 1;
@ -545,7 +544,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
if (!CuvidTestSurfaces())
usleep(1000);
//printf("send packet to decode %s\n",consumed?"ok":"Full");
// printf("send packet to decode %s\n",consumed?"ok":"Full");
if ((ret1 == AVERROR(EAGAIN) || ret1 == AVERROR_EOF || ret1 >= 0) && CuvidTestSurfaces()) {
ret = 0;
@ -557,7 +556,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
} else {
got_frame = 0;
}
// printf("got %s packet from decoder\n",got_frame?"1":"no");
// printf("got %s packet from decoder\n",got_frame?"1":"no");
if (got_frame) { // frame completed
#ifdef YADIF
if (decoder->filter) {
@ -572,24 +571,24 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
}
if (frame->interlaced_frame && decoder->filter == 2 && (frame->height != 720)) { // broken ZDF sends Interlaced flag
ret = push_filters(video_ctx, decoder->HwDecoder, frame);
// av_frame_unref(frame);
// av_frame_unref(frame);
continue;
}
}
#endif
//DisplayPts(video_ctx, frame);
// DisplayPts(video_ctx, frame);
VideoRenderFrame(decoder->HwDecoder, video_ctx, frame);
// av_frame_unref(frame);
// av_frame_unref(frame);
} else {
av_frame_free(&frame);
// printf("codec: got no frame %d send %d\n",ret,ret1);
// printf("codec: got no frame %d send %d\n",ret,ret1);
}
}
if (!CuvidTestSurfaces()) {
usleep(1000);
}
} else {
// consumed = 1;
// consumed = 1;
}
if (!consumed) {
@ -694,9 +693,9 @@ static char CodecAudioDrift; ///< flag: enable audio-drift correction
static const int CodecAudioDrift = 0;
#endif
#ifdef USE_PASSTHROUGH
///
/// Pass-through flags: CodecPCM, CodecAC3, CodecEAC3, ...
///
///
/// Pass-through flags: CodecPCM, CodecAC3, CodecEAC3, ...
///
static char CodecPassthrough;
#else
static const int CodecPassthrough = 0;
@ -745,7 +744,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id));
if (!(audio_codec = avcodec_find_decoder(codec_id))) {
// if (!(audio_codec = avcodec_find_decoder(codec_id))) {
// if (!(audio_codec = avcodec_find_decoder(codec_id))) {
Fatal(_("codec: codec ID %#06x not found\n"), codec_id);
// FIXME: errors aren't fatal
}
@ -765,9 +764,9 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
av_dict = NULL;
// FIXME: import settings
//av_dict_set(&av_dict, "dmix_mode", "0", 0);
//av_dict_set(&av_dict, "ltrt_cmixlev", "1.414", 0);
//av_dict_set(&av_dict, "loro_cmixlev", "1.414", 0);
// av_dict_set(&av_dict, "dmix_mode", "0", 0);
// av_dict_set(&av_dict, "ltrt_cmixlev", "1.414", 0);
// av_dict_set(&av_dict, "loro_cmixlev", "1.414", 0);
if (avcodec_open2(audio_decoder->AudioCtx, audio_codec, &av_dict) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't open audio codec\n"));
@ -1364,12 +1363,12 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
#if 0
ret = avcodec_decode_audio4(avctx, frame, &got_frame, avpkt);
#else
// SUGGESTION
// Now that avcodec_decode_audio4 is deprecated and replaced
// by 2 calls (receive frame and send packet), this could be optimized
// into separate routines or separate threads.
// Also now that it always consumes a whole buffer some code
// in the caller may be able to be optimized.
// SUGGESTION
// Now that avcodec_decode_audio4 is deprecated and replaced
// by 2 calls (receive frame and send packet), this could be optimized
// into separate routines or separate threads.
// Also now that it always consumes a whole buffer some code
// in the caller may be able to be optimized.
ret = avcodec_receive_frame(avctx, frame);
if (ret == 0)
got_frame = 1;
@ -1380,7 +1379,7 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
if (ret == AVERROR(EAGAIN))
ret = 0;
else if (ret < 0) {
// Debug(3, "codec/audio: audio decode error: %1 (%2)\n",av_make_error_string(error, sizeof(error), ret),got_frame);
// Debug(3, "codec/audio: audio decode error: %1 (%2)\n",av_make_error_string(error, sizeof(error), ret),got_frame);
return ret;
} else
ret = avpkt->size;
@ -1401,7 +1400,7 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
samples = (char *)samples + data_size;
}
}
//Debug(3,"data_size %d nb_samples %d sample_fmt %d channels %d planar %d\n",data_size,frame->nb_samples,avctx->sample_fmt,avctx->channels,planar);
// Debug(3,"data_size %d nb_samples %d sample_fmt %d channels %d planar %d\n",data_size,frame->nb_samples,avctx->sample_fmt,avctx->channels,planar);
*frame_size_ptr = data_size * avctx->channels * frame->nb_samples;
} else {
*frame_size_ptr = 0;
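
The SUGGESTION comment in myavcodec_decode_audio3() above points at the send/receive API that replaced avcodec_decode_audio4(). The sketch below only illustrates that two-call pattern; the helper name is made up, it is not the plugin's implementation, and error handling is reduced to the essentials.

#include <libavcodec/avcodec.h>

/* Illustrative send/receive decode loop replacing avcodec_decode_audio4();
 * not the plugin's code. */
static int decode_audio_sketch(AVCodecContext *ctx, const AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(ctx, pkt);        /* feed one compressed packet */
    if (ret < 0 && ret != AVERROR(EAGAIN))
        return ret;

    for (;;) {
        ret = avcodec_receive_frame(ctx, frame);    /* drain decoded frames */
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                               /* need more input / flushed */
        if (ret < 0)
            return ret;                             /* real decode error */
        /* ... hand 'frame' to resampling / the audio ring buffer here ... */
    }
}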

codec.h (50 lines changed)

@ -60,10 +60,10 @@ struct _video_decoder_
int GetFormatDone; ///< flag get format called!
AVCodec *VideoCodec; ///< video codec
AVCodecContext *VideoCtx; ///< video codec context
//#ifdef FFMPEG_WORKAROUND_ARTIFACTS
// #ifdef FFMPEG_WORKAROUND_ARTIFACTS
int FirstKeyFrame; ///< flag first frame
//#endif
// AVFrame *Frame; ///< decoded video frame
// #endif
// AVFrame *Frame; ///< decoded video frame
int filter; // flag for deint filter
@ -94,82 +94,82 @@ struct _video_decoder_
// Typedefs
//----------------------------------------------------------------------------
/// Video decoder typedef.
/// Video decoder typedef.
typedef struct _video_decoder_ VideoDecoder;
/// Audio decoder typedef.
/// Audio decoder typedef.
typedef struct _audio_decoder_ AudioDecoder;
//----------------------------------------------------------------------------
// Variables
//----------------------------------------------------------------------------
/// x11 display name
/// x11 display name
extern const char *X11DisplayName;
/// HW device context from video module
/// HW device context from video module
extern AVBufferRef *HwDeviceContext;
//----------------------------------------------------------------------------
// Variables
//----------------------------------------------------------------------------
/// Flag prefer fast channel switch
/// Flag prefer fast channel switch
extern char CodecUsePossibleDefectFrames;
//----------------------------------------------------------------------------
// Prototypes
//----------------------------------------------------------------------------
/// Allocate a new video decoder context.
/// Allocate a new video decoder context.
extern VideoDecoder *CodecVideoNewDecoder(VideoHwDecoder *);
/// Deallocate a video decoder context.
/// Deallocate a video decoder context.
extern void CodecVideoDelDecoder(VideoDecoder *);
/// Open video codec.
/// Open video codec.
extern void CodecVideoOpen(VideoDecoder *, int);
/// Close video codec.
/// Close video codec.
extern void CodecVideoClose(VideoDecoder *);
/// Decode a video packet.
/// Decode a video packet.
extern void CodecVideoDecode(VideoDecoder *, const AVPacket *);
/// Flush video buffers.
/// Flush video buffers.
extern void CodecVideoFlushBuffers(VideoDecoder *);
/// Allocate a new audio decoder context.
/// Allocate a new audio decoder context.
extern AudioDecoder *CodecAudioNewDecoder(void);
/// Deallocate an audio decoder context.
/// Deallocate an audio decoder context.
extern void CodecAudioDelDecoder(AudioDecoder *);
/// Open audio codec.
/// Open audio codec.
extern void CodecAudioOpen(AudioDecoder *, int);
/// Close audio codec.
/// Close audio codec.
extern void CodecAudioClose(AudioDecoder *);
/// Set audio drift correction.
/// Set audio drift correction.
extern void CodecSetAudioDrift(int);
/// Set audio pass-through.
/// Set audio pass-through.
extern void CodecSetAudioPassthrough(int);
/// Set audio downmix.
/// Set audio downmix.
extern void CodecSetAudioDownmix(int);
/// Decode an audio packet.
/// Decode an audio packet.
extern void CodecAudioDecode(AudioDecoder *, const AVPacket *);
/// Flush audio buffers.
/// Flush audio buffers.
extern void CodecAudioFlushBuffers(AudioDecoder *);
/// Setup and initialize codec module.
/// Setup and initialize codec module.
extern void CodecInit(void);
/// Cleanup and exit codec module.
/// Cleanup and exit codec module.
extern void CodecExit(void);
/// @}


@ -86,7 +86,7 @@ void mpgl_load_functions2(GL * gl, void *(*get_fn)(void *ctx, const char *n), vo
typedef void (GLAPIENTRY * MP_GLDEBUGPROC) (GLenum, GLenum, GLuint, GLenum, GLsizei, const GLchar *, const void *);
//function pointers loaded from the OpenGL library
// function pointers loaded from the OpenGL library
struct GL
{
int version; // MPGL_VER() mangled (e.g. 210 for 2.1)


@ -294,14 +294,14 @@ bool cShader::Compile(const char *vertexCode, const char *fragmentCode) {
sVertex = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(sVertex, 1, &vertexCode, NULL);
glCompileShader(sVertex);
// esyslog("[softhddev]:SHADER:VERTEX %s\n",vertexCode);
// esyslog("[softhddev]:SHADER:VERTEX %s\n",vertexCode);
if (!CheckCompileErrors(sVertex))
return false;
// Fragment Shader
sFragment = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(sFragment, 1, &fragmentCode, NULL);
glCompileShader(sFragment);
// esyslog("[softhddev]:SHADER:FRAGMENT %s\n",fragmentCode);
// esyslog("[softhddev]:SHADER:FRAGMENT %s\n",fragmentCode);
if (!CheckCompileErrors(sFragment))
return false;
// link Program
@ -375,8 +375,8 @@ void cOglGlyph::BindTexture(void) {
void cOglGlyph::LoadTexture(FT_BitmapGlyph ftGlyph) {
// Disable byte-alignment restriction
#ifdef VAAPI
OSD_release_context();
OSD_get_shared_context();
OSD_release_context();
OSD_get_shared_context();
#endif
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, &texture);
@ -400,8 +400,8 @@ void cOglGlyph::LoadTexture(FT_BitmapGlyph ftGlyph) {
glBindTexture(GL_TEXTURE_2D, 0);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
#ifdef VAAPI
OSD_release_context();
OSD_get_context();
OSD_release_context();
OSD_get_context();
#endif
}
@ -559,18 +559,18 @@ cOglFb::cOglFb(GLint width, GLint height, GLint viewPortWidth, GLint viewPortHei
}
cOglFb::~cOglFb(void) {
if (texture)
glDeleteTextures(1, &texture);
if (fb)
glDeleteFramebuffers(1, &fb);
if (texture)
glDeleteTextures(1, &texture);
if (fb)
glDeleteFramebuffers(1, &fb);
}
bool cOglFb::Init(void) {
initiated = true;
#ifdef VAAPI
OSD_release_context();
OSD_get_shared_context();
OSD_release_context();
OSD_get_shared_context();
#endif
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
@ -588,14 +588,14 @@ bool cOglFb::Init(void) {
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
esyslog("[softhddev]ERROR: %d Framebuffer is not complete!\n",__LINE__);
#ifdef VAAPI
OSD_release_context();
OSD_get_context();
OSD_release_context();
OSD_get_context();
#endif
return false;
}
#ifdef VAAPI
OSD_release_context();
OSD_get_context();
OSD_release_context();
OSD_get_context();
#endif
return true;
}
@ -636,24 +636,24 @@ void cOglFb::Blit(GLint destX1, GLint destY1, GLint destX2, GLint destY2) {
* cOglOutputFb
****************************************************************************************/
cOglOutputFb::cOglOutputFb(GLint width, GLint height) : cOglFb(width, height, width, height) {
// surface = 0;
initiated = false;
// surface = 0;
initiated = false;
fb = 0;
texture = 0;
}
cOglOutputFb::~cOglOutputFb(void) {
// glVDPAUUnregisterSurfaceNV(surface);
glDeleteTextures(1, &texture);
// glVDPAUUnregisterSurfaceNV(surface);
glDeleteTextures(1, &texture);
glDeleteFramebuffers(1, &fb);
}
bool cOglOutputFb::Init(void) {
initiated = true;
initiated = true;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
@ -670,7 +670,7 @@ bool cOglOutputFb::Init(void) {
}
void cOglOutputFb::BindWrite(void) {
if (!initiated)
if (!initiated)
Init();
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fb);
}
@ -700,35 +700,35 @@ cOglVb::~cOglVb(void) {
bool cOglVb::Init(void) {
if (type == vbTexture) {
//Texture VBO definition
// Texture VBO definition
sizeVertex1 = 2;
sizeVertex2 = 2;
numVertices = 6;
drawMode = GL_TRIANGLES;
shader = stTexture;
} else if (type == vbRect) {
//Rectangle VBO definition
// Rectangle VBO definition
sizeVertex1 = 2;
sizeVertex2 = 0;
numVertices = 4;
drawMode = GL_TRIANGLE_FAN;
shader = stRect;
} else if (type == vbEllipse) {
//Ellipse VBO definition
// Ellipse VBO definition
sizeVertex1 = 2;
sizeVertex2 = 0;
numVertices = 182;
drawMode = GL_TRIANGLE_FAN;
shader = stRect;
} else if (type == vbSlope) {
//Slope VBO definition
// Slope VBO definition
sizeVertex1 = 2;
sizeVertex2 = 0;
numVertices = 102;
drawMode = GL_TRIANGLE_FAN;
shader = stRect;
} else if (type == vbText) {
//Text VBO definition
// Text VBO definition
sizeVertex1 = 2;
sizeVertex2 = 2;
numVertices = 6;
@ -839,8 +839,8 @@ cOglCmdDeleteFb::cOglCmdDeleteFb(cOglFb *fb) : cOglCmd(fb) {
}
bool cOglCmdDeleteFb::Execute(void) {
if (fb)
delete fb;
if (fb)
delete fb;
return true;
}
@ -889,7 +889,7 @@ bool cOglCmdRenderFbToBufferFb::Execute(void) {
buffer->Bind();
if (!fb->BindTexture())
return false;
VertexBuffers[vbTexture]->Bind();
VertexBuffers[vbTexture]->Bind();
VertexBuffers[vbTexture]->SetVertexData(quadVertices);
VertexBuffers[vbTexture]->DrawArrays();
VertexBuffers[vbTexture]->Unbind();
@ -907,32 +907,32 @@ cOglCmdCopyBufferToOutputFb::cOglCmdCopyBufferToOutputFb(cOglFb *fb, cOglOutputF
#ifdef PLACEBO
//extern "C" {
extern unsigned char *posd;
extern unsigned char *posd;
//}
#endif
bool cOglCmdCopyBufferToOutputFb::Execute(void) {
int i;
int i;
pthread_mutex_lock(&OSDMutex);
fb->BindRead();
oFb->BindWrite();
glClear(GL_COLOR_BUFFER_BIT);
glClear(GL_COLOR_BUFFER_BIT);
#ifdef PLACEBO
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
if (posd)
glReadPixels(0, 0 ,fb->Width(), fb->Height(),GL_BGRA,GL_UNSIGNED_BYTE,posd);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
if (posd)
glReadPixels(0, 0 ,fb->Width(), fb->Height(),GL_BGRA,GL_UNSIGNED_BYTE,posd);
#else
fb->Blit(x, y + fb->Height(), x + fb->Width(), y);
glFlush();
fb->Blit(x, y + fb->Height(), x + fb->Width(), y);
glFlush();
#endif
ActivateOsd(oFb->texture,x, y, fb->Width() ,fb->Height());
ActivateOsd(oFb->texture,x, y, fb->Width() ,fb->Height());
oFb->Unbind();
pthread_mutex_unlock(&OSDMutex);
oFb->Unbind();
pthread_mutex_unlock(&OSDMutex);
return true;
}
@ -946,7 +946,7 @@ bool cOglCmdFill::Execute(void) {
glm::vec4 col;
ConvertColor(color, col);
fb->Bind();
glClearColor(col.r, col.g, col.b, col.a);
glClearColor(col.r, col.g, col.b, col.a);
glClear(GL_COLOR_BUFFER_BIT);
fb->Unbind();
return true;
@ -1027,7 +1027,7 @@ bool cOglCmdDrawEllipse::Execute(void) {
VertexBuffers[vbEllipse]->SetShaderColor(color);
VertexBuffers[vbEllipse]->SetShaderProjectionMatrix(fb->Width(), fb->Height());
//not antialiased
// not antialiased
fb->Bind();
VertexBuffers[vbEllipse]->DisableBlending();
VertexBuffers[vbEllipse]->Bind();
@ -1245,7 +1245,7 @@ bool cOglCmdDrawSlope::Execute(void) {
VertexBuffers[vbSlope]->SetShaderColor(color);
VertexBuffers[vbSlope]->SetShaderProjectionMatrix(fb->Width(), fb->Height());
//not antialiased
// not antialiased
fb->Bind();
VertexBuffers[vbSlope]->DisableBlending();
VertexBuffers[vbSlope]->Bind();
@ -1356,8 +1356,8 @@ cOglCmdDrawImage::~cOglCmdDrawImage(void) {
bool cOglCmdDrawImage::Execute(void) {
GLuint texture;
#ifdef VAAPI
OSD_release_context();
OSD_get_shared_context();
OSD_release_context();
OSD_get_shared_context();
#endif
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
@ -1378,8 +1378,8 @@ bool cOglCmdDrawImage::Execute(void) {
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
#ifdef VAAPI
OSD_release_context();
OSD_get_context();
OSD_release_context();
OSD_get_context();
#endif
GLfloat x1 = x; //left
@ -1471,8 +1471,8 @@ cOglCmdStoreImage::~cOglCmdStoreImage(void) {
bool cOglCmdStoreImage::Execute(void) {
#ifdef VAAPI
OSD_release_context();
OSD_get_shared_context();
OSD_release_context();
OSD_get_shared_context();
#endif
glGenTextures(1, &imageRef->texture);
glBindTexture(GL_TEXTURE_2D, imageRef->texture);
@ -1493,8 +1493,8 @@ bool cOglCmdStoreImage::Execute(void) {
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
#ifdef VAAPI
OSD_release_context();
OSD_get_context();
OSD_release_context();
OSD_get_context();
#endif
return true;
}
@ -1714,9 +1714,9 @@ void cOglThread::Action(void) {
cOglCmd* cmd = commands.front();
commands.pop();
Unlock();
//uint64_t start = cTimeMs::Now();
// uint64_t start = cTimeMs::Now();
cmd->Execute();
//esyslog("[softhddev]\"%s\", %dms, %d commands left, time %" PRIu64 "", cmd->Description(), (int)(cTimeMs::Now() - start), commands.size(), cTimeMs::Now());
// esyslog("[softhddev]\"%s\", %dms, %d commands left, time %" PRIu64 "", cmd->Description(), (int)(cTimeMs::Now() - start), commands.size(), cTimeMs::Now());
delete cmd;
if (stalled && commands.size() < OGL_CMDQUEUE_SIZE / 2)
stalled = false;
@ -1770,8 +1770,8 @@ bool cOglThread::InitOpenGL(void) {
}
#else
if (!GlxInitopengl())
return false;
if (!GlxInitopengl())
return false;
#endif
VertexBuffers[vbText]->EnableBlending();
glDisable(GL_DEPTH_TEST);
@ -1823,21 +1823,21 @@ void cOglThread::DeleteVertexBuffers(void) {
}
void cOglThread::Cleanup(void) {
esyslog("[softhddev]OglThread cleanup\n");
pthread_mutex_lock(&OSDMutex);
OsdClose();
esyslog("[softhddev]OglThread cleanup\n");
pthread_mutex_lock(&OSDMutex);
OsdClose();
DeleteVertexBuffers();
delete cOglOsd::oFb;
cOglOsd::oFb = NULL;
DeleteShaders();
// glVDPAUFiniNV();
// glVDPAUFiniNV();
cOglFont::Cleanup();
#ifdef PLACEBO
glutExit();
#endif
pthread_mutex_unlock(&OSDMutex);
pthread_mutex_unlock(&OSDMutex);
}
/****************************************************************************************
@ -2080,37 +2080,37 @@ cOglOsd::cOglOsd(int Left, int Top, uint Level, std::shared_ptr<cOglThread> oglT
pthread_mutex_lock(&OSDMutex);
VideoGetOsdSize(&osdWidth, &osdHeight);
// osdWidth = 1920;
// osdHeight = 1080;
// osdWidth = 1920;
// osdHeight = 1080;
dsyslog("[softhddev]cOglOsd osdLeft %d osdTop %d screenWidth %d screenHeight %d", Left, Top, osdWidth, osdHeight);
#ifdef PLACEBO
if (posd)
free(posd);
posd = MALLOC(unsigned char, osdWidth * osdHeight * 4);
if (posd)
free(posd);
posd = MALLOC(unsigned char, osdWidth * osdHeight * 4);
#endif
//create output framebuffer
// create output framebuffer
#ifdef VAAPI
OSD_release_context();
OSD_get_shared_context();
OSD_release_context();
OSD_get_shared_context();
#endif
if (!oFb) {
oFb = new cOglOutputFb(osdWidth, osdHeight);
oglThread->DoCmd(new cOglCmdInitOutputFb(oFb));
}
#ifdef VAAPI
OSD_release_context();
OSD_release_context();
#endif
pthread_mutex_unlock(&OSDMutex);
pthread_mutex_unlock(&OSDMutex);
}
cOglOsd::~cOglOsd() {
OsdClose();
SetActive(false);
#ifdef PLACEBO
if (posd)
free(posd);
posd = 0;
if (posd)
free(posd);
posd = 0;
#endif
oglThread->DoCmd(new cOglCmdDeleteFb(bFb));
}
@ -2124,7 +2124,7 @@ eOsdError cOglOsd::SetAreas(const tArea *Areas, int NumAreas) {
tArea area = { r.Left(), r.Top(), r.Right(), r.Bottom(), 32 };
//now we know the actual osd size, create double buffer frame buffer
// now we know the actual osd size, create double buffer frame buffer
if (bFb) {
oglThread->DoCmd(new cOglCmdDeleteFb(bFb));
DestroyPixmap(oglPixmaps[0]);
@ -2154,11 +2154,11 @@ cPixmap *cOglOsd::CreatePixmap(int Layer, const cRect &ViewPort, const cRect &Dr
cOglPixmap *p = new cOglPixmap(oglThread, Layer, ViewPort, DrawPort);
if (cOsd::AddPixmap(p)) {
//find free slot
// find free slot
for (int i = 0; i < oglPixmaps.Size(); i++)
if (!oglPixmaps[i])
return oglPixmaps[i] = p;
//append at end
// append at end
oglPixmaps.Append(p);
return p;
}
@ -2190,21 +2190,21 @@ void cOglOsd::Flush(void) {
if (!oglThread->Active())
return;
LOCK_PIXMAPS;
//check if any pixmap is dirty
// check if any pixmap is dirty
bool dirty = false;
for (int i = 0; i < oglPixmaps.Size() && !dirty; i++)
if (oglPixmaps[i] && oglPixmaps[i]->Layer() >= 0 && oglPixmaps[i]->IsDirty())
dirty = true;
if (!dirty)
return;
//clear buffer
//uint64_t start = cTimeMs::Now();
//dsyslog("[softhddev]Start Flush at %" PRIu64 "", cTimeMs::Now());
// clear buffer
// uint64_t start = cTimeMs::Now();
// dsyslog("[softhddev]Start Flush at %" PRIu64 "", cTimeMs::Now());
oglThread->DoCmd(new cOglCmdFill(bFb, clrTransparent));
//render pixmap textures blended to buffer
// render pixmap textures blended to buffer
for (int layer = 0; layer < MAXPIXMAPLAYERS; layer++) {
for (int i = 0; i < oglPixmaps.Size(); i++) {
if (oglPixmaps[i]) {
@ -2223,7 +2223,7 @@ void cOglOsd::Flush(void) {
}
oglThread->DoCmd(new cOglCmdCopyBufferToOutputFb(bFb, oFb, Left(), Top()));
//dsyslog("[softhddev]End Flush at %" PRIu64 ", duration %d", cTimeMs::Now(), (int)(cTimeMs::Now()-start));
// dsyslog("[softhddev]End Flush at %" PRIu64 ", duration %d", cTimeMs::Now(), (int)(cTimeMs::Now()-start));
}
void cOglOsd::DrawScaledBitmap(int x, int y, const cBitmap &Bitmap, double FactorX, double FactorY, bool AntiAlias) {


@ -202,8 +202,8 @@ class cOglFb
{
protected:
bool initiated;
// GLuint fb;
// GLuint texture;
// GLuint fb;
// GLuint texture;
GLint width, height;
GLint viewPortWidth, viewPortHeight;
bool scrollable;


@ -33,7 +33,7 @@
#include "iatomic.h"
#include "ringbuffer.h"
/// ring buffer structure
/// ring buffer structure
struct _ring_buffer_
{
char *Buffer; ///< ring buffer data


@ -23,40 +23,40 @@
/// @addtogroup Ringbuffer
/// @{
/// ring buffer typedef
/// ring buffer typedef
typedef struct _ring_buffer_ RingBuffer;
/// reset ring buffer pointers
/// reset ring buffer pointers
extern void RingBufferReset(RingBuffer *);
/// create new ring buffer
/// create new ring buffer
extern RingBuffer *RingBufferNew(size_t);
/// free ring buffer
/// free ring buffer
extern void RingBufferDel(RingBuffer *);
/// write into ring buffer
/// write into ring buffer
extern size_t RingBufferWrite(RingBuffer *, const void *, size_t);
/// get write pointer of ring buffer
/// get write pointer of ring buffer
extern size_t RingBufferGetWritePointer(RingBuffer *, void **);
/// advance write pointer of ring buffer
/// advance write pointer of ring buffer
extern size_t RingBufferWriteAdvance(RingBuffer *, size_t);
/// read from ring buffer
/// read from ring buffer
extern size_t RingBufferRead(RingBuffer *, void *, size_t);
/// get read pointer of ring buffer
/// get read pointer of ring buffer
extern size_t RingBufferGetReadPointer(RingBuffer *, const void **);
/// advance read pointer of ring buffer
/// advance read pointer of ring buffer
extern size_t RingBufferReadAdvance(RingBuffer *, size_t);
/// free bytes ring buffer
/// free bytes ring buffer
extern size_t RingBufferFreeBytes(RingBuffer *);
/// used bytes ring buffer
/// used bytes ring buffer
extern size_t RingBufferUsedBytes(RingBuffer *);
/// @}
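
For reference, the ring-buffer API declared above can be exercised as in the following sketch. Only the function names and signatures come from this header; the helper name, the 4 KiB size, and the payload are made-up example values.

#include <string.h>
#include "ringbuffer.h"

/* Illustrative use of the RingBuffer API; sizes and data are arbitrary. */
static void ringbuffer_example(void)
{
    RingBuffer *rb = RingBufferNew(4096);       /* create new ring buffer */
    const char msg[] = "pcm data";

    if (RingBufferFreeBytes(rb) >= sizeof(msg))
        RingBufferWrite(rb, msg, sizeof(msg));  /* write into ring buffer */

    char out[sizeof(msg)];
    if (RingBufferUsedBytes(rb) >= sizeof(out))
        RingBufferRead(rb, out, sizeof(out));   /* read from ring buffer */

    RingBufferDel(rb);                          /* free ring buffer */
}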


@ -1,4 +1,3 @@
// shader
#ifdef CUVID
char vertex_osd[] = { "\


@ -89,4 +89,3 @@ color.a = 1.0;
// color mapping
out_color = color;
}

File diff suppressed because it is too large.


@ -90,7 +90,7 @@ static enum AVCodecID AudioCodecID; ///< current codec id
static int AudioChannelID; ///< current audio channel id
static VideoStream *AudioSyncStream; ///< video stream for audio/video sync
/// Minimum free space in audio buffer 8 packets for 8 channels
/// Minimum free space in audio buffer 8 packets for 8 channels
#define AUDIO_MIN_BUFFER_FREE (3072 * 8 * 8)
#define AUDIO_BUFFER_SIZE (512 * 1024) ///< audio PES buffer default size
static AVPacket AudioAvPkt[1]; ///< audio a/v packet
@ -884,9 +884,9 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size, int is_st
// Transport stream demux
//////////////////////////////////////////////////////////////////////////////
/// Transport stream packet size
/// Transport stream packet size
#define TS_PACKET_SIZE 188
/// Transport stream packet sync byte
/// Transport stream packet sync byte
#define TS_PACKET_SYNC 0x47
///
@ -962,11 +962,11 @@ static int TsDemuxer(TsDemux * tsdx, const uint8_t * data, int size)
#if 0
int tmp;
// check continuity
// check continuity
tmp = p[3] & 0x0F; // continuity counter
if (((tsdx->CC + 1) & 0x0F) != tmp) {
Debug(3, "tsdemux: OUT OF SYNC: %d %d\n", tmp, tsdx->CC);
//TS discontinuity (received 8, expected 0) for PID
// TS discontinuity (received 8, expected 0) for PID
}
tsdx->CC = tmp;
#endif
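
The disabled block above checks the 4-bit continuity counter in byte 3 of each 188-byte TS packet. The following self-contained sketch shows the same idea; the function name is made up, the state lives in a plain int rather than the plugin's TsDemux struct, and the adaptation-field rules a real demuxer must honor are ignored for brevity.

#include <stdint.h>

#define TS_PACKET_SIZE 188      /* transport stream packet size */
#define TS_PACKET_SYNC 0x47     /* transport stream sync byte   */

/* Illustrative continuity check for one TS packet; pass -1 as the initial
 * value of *last_cc (cf. tsdx->CC above). */
static int ts_check_packet(const uint8_t packet[TS_PACKET_SIZE], int *last_cc)
{
    if (packet[0] != TS_PACKET_SYNC)
        return -1;                              /* lost sync */

    int cc = packet[3] & 0x0F;                  /* 4-bit continuity counter */
    int discontinuity = *last_cc >= 0 && ((*last_cc + 1) & 0x0F) != cc;

    *last_cc = cc;
    return discontinuity;                       /* 0 = ok, 1 = discontinuity */
}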
@ -1049,7 +1049,7 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
AudioAvPkt->pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
0xFE) >> 1;
//Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
// Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
}
if (0) { // dts is unused
if (data[7] & 0x40) {
@ -1256,7 +1256,7 @@ int PlayTsAudio(const uint8_t * data, int size)
Debug(3, "AudioDelay %dms\n", AudioDelay);
usleep(AudioDelay * 1000);
AudioDelay = 0;
// TsDemuxer(tsdx, data, size); // insert dummy audio
// TsDemuxer(tsdx, data, size); // insert dummy audio
}
return TsDemuxer(tsdx, data, size);
@ -1333,7 +1333,7 @@ static VideoStream PipVideoStream[1]; ///< pip video stream
uint32_t VideoSwitch; ///< debug video switch ticks
static int VideoMaxPacketSize; ///< biggest used packet buffer
#endif
//#define STILL_DEBUG 2
// #define STILL_DEBUG 2
#ifdef STILL_DEBUG
static char InStillPicture; ///< flag still picture
#endif
@ -1394,7 +1394,7 @@ static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const v
{
AVPacket *avpkt;
// Debug(3, "video: enqueue %d\n", size);
// Debug(3, "video: enqueue %d\n", size);
avpkt = &stream->PacketRb[stream->PacketWrite];
if (!avpkt->stream_index) { // add pts only for first added
@ -1404,8 +1404,8 @@ static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const v
if (avpkt->stream_index + size >= avpkt->size) {
// Warning(_("video: packet buffer too small for %d\n"),
// avpkt->stream_index + size);
// Warning(_("video: packet buffer too small for %d\n"),
// avpkt->stream_index + size);
// new + grow reserves FF_INPUT_BUFFER_PADDING_SIZE
av_grow_packet(avpkt, ((size + VIDEO_BUFFER_SIZE / 2)
@ -1479,7 +1479,7 @@ static void VideoNextPacket(VideoStream * stream, int codec_id)
memset(avpkt->data + avpkt->stream_index, 0, AV_INPUT_BUFFER_PADDING_SIZE);
stream->CodecIDRb[stream->PacketWrite] = codec_id;
//DumpH264(avpkt->data, avpkt->stream_index);
// DumpH264(avpkt->data, avpkt->stream_index);
// advance packet write
stream->PacketWrite = (stream->PacketWrite + 1) % VIDEO_PACKET_MAX;
@ -1618,7 +1618,7 @@ static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts, con
continue;
}
if (!p[0] && !p[1] && p[2] == 0x01 && p[3] == 0xb3) {
// printf("aspectratio %02x\n",p[7]>>4);
// printf("aspectratio %02x\n",p[7]>>4);
}
--n;
++p;
@ -1868,7 +1868,7 @@ int VideoDecodeInput(VideoStream * stream)
}
filled = atomic_read(&stream->PacketsFilled);
// printf("Packets in Decode %d\n",filled);
// printf("Packets in Decode %d\n",filled);
if (!filled) {
return -1;
}
@ -1941,8 +1941,8 @@ int VideoDecodeInput(VideoStream * stream)
avpkt->stream_index = 0;
#ifdef USE_PIP
//fprintf(stderr, "[");
//DumpMpeg(avpkt->data, avpkt->size);
// fprintf(stderr, "[");
// DumpMpeg(avpkt->data, avpkt->size);
#ifdef STILL_DEBUG
if (InStillPicture) {
DumpMpeg(avpkt->data, avpkt->size);
@ -1954,7 +1954,7 @@ int VideoDecodeInput(VideoStream * stream)
CodecVideoDecode(stream->Decoder, avpkt);
}
pthread_mutex_unlock(&stream->DecoderLockMutex);
//fprintf(stderr, "]\n");
// fprintf(stderr, "]\n");
#else
// old version
if (stream->LastCodecID == AV_CODEC_ID_MPEG2VIDEO) {
@ -2193,7 +2193,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
}
// hard limit buffer full: needed for replay
if (atomic_read(&stream->PacketsFilled) >= VIDEO_PACKET_MAX - 10) {
// Debug(3, "video: video buffer full\n");
// Debug(3, "video: video buffer full\n");
return 0;
}
#ifdef USE_SOFTLIMIT
@ -2225,7 +2225,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
z = 0;
while (!*check) { // count leading zeros
if (l < 3) {
// Warning(_("[softhddev] empty video packet %d bytes\n"), size);
// Warning(_("[softhddev] empty video packet %d bytes\n"), size);
z = 0;
break;
}
@ -2372,7 +2372,7 @@ int PlayVideo(const uint8_t * data, int size)
return PlayVideo3(MyVideoStream, data, size);
}
/// call VDR support function
/// call VDR support function
extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);
#if defined(USE_JPEG) && JPEG_LIB_VERSION >= 80
@ -3033,7 +3033,7 @@ static void StartXServer(void)
int maxfd;
int fd;
// X server
// X server
if (X11Server) {
args[0] = X11Server;
} else {
@ -3047,7 +3047,7 @@ static void StartXServer(void)
// export display for childs
setenv("DISPLAY", X11DisplayName, 1);
}
// split X server arguments string into words
// split X server arguments string into words
if ((sval = X11ServerArguments)) {
char *s;
@ -3071,13 +3071,13 @@ static void StartXServer(void)
// FIXME: append VTxx
args[argn] = NULL;
// arm the signal
// arm the signal
memset(&usr1, 0, sizeof(struct sigaction));
usr1.sa_handler = Usr1Handler;
sigaction(SIGUSR1, &usr1, NULL);
Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0], X11ServerArguments);
// fork
// fork
if ((pid = fork())) { // parent
X11ServerPid = pid;
@ -3096,7 +3096,7 @@ static void StartXServer(void)
close(fd); // vdr should open with O_CLOEXEC
}
// start the X server
// start the X server
execvp(args[0], (char *const *)args);
Error(_("x-setup: Failed to start X server '%s'\n"), args[0]);
@ -3269,7 +3269,7 @@ void MainThreadHook(void)
// Suspend/Resume
//////////////////////////////////////////////////////////////////////////////
/// call VDR support function
/// call VDR support function
extern void DelPip(void);
/**

video.c (369 lines changed)

@ -44,10 +44,10 @@
#define noUSE_SCREENSAVER ///< support disable screensaver
#define USE_GRAB ///< experimental grab code
//#define USE_GLX ///< outdated GLX code
// #define USE_GLX ///< outdated GLX code
#define USE_DOUBLEBUFFER ///< use GLX double buffers
#define USE_CUVID ///< enable cuvid support
//#define AV_INFO ///< log a/v sync information
// #define AV_INFO ///< log a/v sync information
#ifndef AV_INFO_TIME
#define AV_INFO_TIME (50 * 60) ///< a/v info every minute
#endif
@ -86,7 +86,7 @@
#include <time.h>
#include <signal.h>
#ifndef HAVE_PTHREAD_NAME
/// only available with newer glibc
/// only available with newer glibc
#define pthread_setname_np(thread, name)
#endif
#endif
@ -104,12 +104,12 @@
#include <xcb/dpms.h>
#endif
//#include <xcb/shm.h>
//#include <xcb/xv.h>
// #include <xcb/shm.h>
// #include <xcb/xv.h>
//#include <xcb/xcb_image.h>
//#include <xcb/xcb_event.h>
//#include <xcb/xcb_atom.h>
// #include <xcb/xcb_image.h>
// #include <xcb/xcb_event.h>
// #include <xcb/xcb_atom.h>
#include <xcb/xcb_icccm.h>
#ifdef XCB_ICCCM_NUM_WM_SIZE_HINTS_ELEMENTS
#include <xcb/xcb_ewmh.h>
@ -132,10 +132,10 @@ typedef enum
#ifdef USE_GLX
#include <GL/glew.h>
//#include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
//#include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
//#include <GL/glxew.h>
//#include <GL/glx.h>
// #include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
// #include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
// #include <GL/glxew.h>
// #include <GL/glx.h>
// only for gluErrorString
#include <GL/glu.h>
#include <GL/glut.h>
@ -146,8 +146,8 @@ typedef enum
#include <libavutil/pixdesc.h>
#ifdef CUVID
//#include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
//#include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
// #include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
// #include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
#include <cuda.h>
#include <cuda_runtime_api.h>
#include <cudaGL.h>
@ -168,7 +168,7 @@ typedef enum
#endif
#include <assert.h>
//#define EGL_EGLEXT_PROTOTYPES
// #define EGL_EGLEXT_PROTOTYPES
#include <EGL/egl.h>
#include <EGL/eglext.h>
#ifndef GL_OES_EGL_image
@ -332,8 +332,8 @@ typedef struct
#define CODEC_SURFACES_MAX 12 //
#define VIDEO_SURFACES_MAX 6 ///< video output surfaces for queue
//#define OUTPUT_SURFACES_MAX 4 ///< output surfaces for flip page
#define VIDEO_SURFACES_MAX 6 ///< video output surfaces for queue
// #define OUTPUT_SURFACES_MAX 4 ///< output surfaces for flip page
#ifdef VAAPI
#define PIXEL_FORMAT AV_PIX_FMT_VAAPI
#define SWAP_BUFFER_SIZE 3
@ -367,53 +367,53 @@ static unsigned VideoWindowHeight; ///< video output window height
static const VideoModule NoopModule; ///< forward definition of noop module
/// selected video module
/// selected video module
static const VideoModule *VideoUsedModule = &NoopModule;
signed char VideoHardwareDecoder = -1; ///< flag use hardware decoder
static char VideoSurfaceModesChanged; ///< flag surface modes changed
/// flag use transparent OSD.
/// flag use transparent OSD.
static const char VideoTransparentOsd = 1;
static uint32_t VideoBackground; ///< video background color
static char VideoStudioLevels; ///< flag use studio levels
/// Default deinterlace mode.
/// Default deinterlace mode.
static VideoDeinterlaceModes VideoDeinterlace[VideoResolutionMax];
/// Default number of deinterlace surfaces
/// Default number of deinterlace surfaces
static const int VideoDeinterlaceSurfaces = 4;
/// Default skip chroma deinterlace flag (CUVID only).
/// Default skip chroma deinterlace flag (CUVID only).
static char VideoSkipChromaDeinterlace[VideoResolutionMax];
/// Default inverse telecine flag (CUVID only).
/// Default inverse telecine flag (CUVID only).
static char VideoInverseTelecine[VideoResolutionMax];
/// Default amount of noise reduction algorithm to apply (0 .. 1000).
/// Default amount of noise reduction algorithm to apply (0 .. 1000).
static int VideoDenoise[VideoResolutionMax];
/// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
/// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
static int VideoSharpen[VideoResolutionMax];
/// Default cut top and bottom in pixels
/// Default cut top and bottom in pixels
static int VideoCutTopBottom[VideoResolutionMax];
/// Default cut left and right in pixels
/// Default cut left and right in pixels
static int VideoCutLeftRight[VideoResolutionMax];
/// Default scaling mode
/// Default scaling mode
static VideoScalingModes VideoScaling[VideoResolutionMax];
/// Default audio/video delay
/// Default audio/video delay
int VideoAudioDelay;
/// Default zoom mode for 4:3
/// Default zoom mode for 4:3
static VideoZoomModes Video4to3ZoomMode;
/// Default zoom mode for 16:9 and others
/// Default zoom mode for 16:9 and others
static VideoZoomModes VideoOtherZoomMode;
static char Video60HzMode; ///< handle 60hz displays
@ -452,9 +452,9 @@ pthread_mutex_t OSDMutex; ///< OSD update mutex
static pthread_t VideoDisplayThread; ///< video display thread
//static pthread_cond_t VideoDisplayWakeupCond; ///< wakeup condition variable
//static pthread_mutex_t VideoDisplayMutex; ///< video condition mutex
//static pthread_mutex_t VideoDisplayLockMutex; ///< video lock mutex
// static pthread_cond_t VideoDisplayWakeupCond; ///< wakeup condition variable
// static pthread_mutex_t VideoDisplayMutex; ///< video condition mutex
// static pthread_mutex_t VideoDisplayLockMutex; ///< video lock mutex
static int OsdConfigWidth; ///< osd configured width
static int OsdConfigHeight; ///< osd configured height
@ -546,21 +546,21 @@ static void VideoSetPts(int64_t * pts_p, int interlaced, const AVCodecContext *
// FIXME: using framerate as workaround for av_frame_get_pkt_duration
//
// if (video_ctx->framerate.num && video_ctx->framerate.den) {
// duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
// } else {
// if (video_ctx->framerate.num && video_ctx->framerate.den) {
// duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
// } else {
duration = interlaced ? 40 : 20; // 50Hz -> 20ms default
// }
// Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->framerate.den, video_ctx->framerate.num, av_frame_get_pkt_duration(frame), duration);
// }
// Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->framerate.den, video_ctx->framerate.num, av_frame_get_pkt_duration(frame), duration);
// update video clock
if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
*pts_p += duration * 90;
//Info("video: %s +pts\n", Timestamp2String(*pts_p));
}
//av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
//pts = frame->best_effort_timestamp;
// pts = frame->pkt_pts;
// av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
// pts = frame->best_effort_timestamp;
// pts = frame->pkt_pts;
pts = frame->pts;
if (pts == (int64_t) AV_NOPTS_VALUE || !pts) {
// libav: 0.8pre didn't set pts
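
The PTS bookkeeping above works in 90 kHz clock ticks: the frame duration in milliseconds is multiplied by 90 before being added to the clock. The tiny helper below only restates that arithmetic; its name is made up and it is not part of the plugin.

#include <stdint.h>

/* Illustrative only: mirrors the logic above, where the video clock is kept
 * in 90 kHz PTS ticks and advanced by the frame duration in milliseconds. */
static int64_t advance_video_pts(int64_t pts, int interlaced)
{
    const int duration_ms = interlaced ? 40 : 20;   /* 50 Hz default: 20 ms/frame */
    return pts + (int64_t)duration_ms * 90;         /* 1 ms == 90 ticks at 90 kHz */
}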
@ -672,7 +672,6 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
(video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
*output_height =
(video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
// JOJO
if (*output_width > video_width) {
*output_width = video_width;
*output_y += (video_height - *output_height) / 2;
@ -744,25 +743,25 @@ static uint64_t test_time = 0;
#define VideoThreadLock(void)\
{\
if (VideoThread) {\
if (pthread_mutex_lock(&VideoLockMutex)) {\
Error(_("video: can't lock thread\n"));\
}\
if (pthread_mutex_lock(&VideoLockMutex)) {\
Error(_("video: can't lock thread\n"));\
}\
}\
}
// test_time = GetusTicks();
// printf("Lock start....");
// test_time = GetusTicks();
// printf("Lock start....");
///
/// Unlock video thread.
///
#define VideoThreadUnlock(void)\
{\
if (VideoThread) {\
if (pthread_mutex_unlock(&VideoLockMutex)) {\
Error(_("video: can't unlock thread\n"));\
}\
}\
if (pthread_mutex_unlock(&VideoLockMutex)) {\
Error(_("video: can't unlock thread\n"));\
}\
}\
}
// printf("Video Locked for %d\n",(GetusTicks()-test_time)/1000);
// printf("Video Locked for %d\n",(GetusTicks()-test_time)/1000);
//----------------------------------------------------------------------------
// GLX
@ -791,7 +790,7 @@ static PFNGLXSWAPINTERVALSGIPROC GlxSwapIntervalSGI;
GLenum err;\
\
if ((err = glGetError()) != GL_NO_ERROR) {\
Debug(3, "video/glx: error %s:%d %d '%s'\n",__FILE__,__LINE__, err, gluErrorString(err));\
Debug(3, "video/glx: error %s:%d %d '%s'\n",__FILE__,__LINE__, err, gluErrorString(err));\
}\
}
@ -840,7 +839,7 @@ char *eglErrorString(EGLint error)
EGLint err;\
\
if ((err = eglGetError()) != EGL_SUCCESS) {\
Debug(3, "video/egl: %s:%d error %d %s\n", __FILE__,__LINE__,err,eglErrorString(err));\
Debug(3, "video/egl: %s:%d error %d %s\n", __FILE__,__LINE__,err,eglErrorString(err));\
}\
}
@ -849,19 +848,19 @@ char *eglErrorString(EGLint error)
void OSD_get_shared_context()
{
eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
// EglCheck();
// EglCheck();
}
void OSD_get_context()
{
eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, OSDcontext);
// EglCheck();
// EglCheck();
}
void OSD_release_context()
{
eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
// EglCheck();
// EglCheck();
}
#endif
@ -968,7 +967,7 @@ static void EglInit(void)
return;
#endif
//The desired 30-bit color visual
// The desired 30-bit color visual
int attributeList10[] = {
GLX_DRAWABLE_TYPE, GLX_WINDOW_BIT,
GLX_RENDER_TYPE, GLX_RGBA_BIT,
@ -1186,10 +1185,10 @@ static void EglExit(void)
// must destroy context
#ifdef CUVID
// must destroy glx
// if (glXGetCurrentContext() == glxContext) {
// if (glXGetCurrentContext() == glxContext) {
// if currently used, set to none
glXMakeCurrent(XlibDisplay, None, NULL);
// }
// }
if (OSDcontext) {
glXDestroyContext(XlibDisplay, OSDcontext);
GlxCheck();
@ -1342,10 +1341,10 @@ typedef struct _cuvid_decoder_
int SurfacesFree[CODEC_SURFACES_MAX];
/// video surface ring buffer
int SurfacesRb[VIDEO_SURFACES_MAX];
// CUcontext cuda_ctx;
// CUcontext cuda_ctx;
// cudaStream_t stream; // make my own cuda stream
// CUgraphicsResource cuResource;
// cudaStream_t stream; // make my own cuda stream
// CUgraphicsResource cuResource;
int SurfaceWrite; ///< write pointer
int SurfaceRead; ///< read pointer
atomic_t SurfacesFilled; ///< how many of the buffer is used
@ -1406,11 +1405,11 @@ typedef struct priv
struct pl_renderer *renderertest;
const struct pl_swapchain *swapchain;
struct pl_context_params context;
// struct pl_render_target r_target;
// struct pl_render_params r_params;
// struct pl_tex final_fbo;
// struct pl_render_target r_target;
// struct pl_render_params r_params;
// struct pl_tex final_fbo;
VkSurfaceKHR pSurface;
// VkSemaphore sig_in;
// VkSemaphore sig_in;
int has_dma_buf;
} priv;
static priv *p;
@ -1420,9 +1419,9 @@ static int semid;
struct itimerval itimer;
#endif
GLuint vao_buffer; //
GLuint vao_buffer;
//GLuint vao_vao[4]; //
//GLuint vao_vao[4];
GLuint gl_shader = 0, gl_prog = 0, gl_fbo = 0; // shader programm
GLint gl_colormatrix, gl_colormatrix_c;
GLuint OSDfb = 0;
@ -1619,7 +1618,7 @@ static int CuvidGetVideoSurface0(CuvidDecoder * decoder)
int i;
if (!decoder->SurfaceFreeN) {
// Error(_("video/cuvid: out of surfaces\n"));
// Error(_("video/cuvid: out of surfaces\n"));
return -1;
}
// use oldest surface
@ -1739,9 +1738,9 @@ static const struct mp_egl_config_attr mp_egl_attribs[] = {
};
const int mpgl_preferred_gl_versions[] = {
// 440,
// 430,
// 400,
// 440,
// 430,
// 400,
330,
320,
310,
@ -1923,7 +1922,7 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)
int i = 0;
// setenv ("DISPLAY", ":0", 0);
// setenv ("DISPLAY", ":0", 0);
Debug(3, "Cuvid New HW Decoder\n");
if ((unsigned)CuvidDecoderN >= sizeof(CuvidDecoders) / sizeof(*CuvidDecoders)) {
@ -1936,8 +1935,8 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)
}
#endif
#ifdef VAAPI
// if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, ":0.0" , NULL, 0)) != 0 ) {
if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", NULL, 0)) != 0) {
// if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, ":0.0" , NULL, 0)) != 0 ) {
Fatal("codec: can't allocate HW video codec context err %04x", i);
}
#endif
@ -1952,8 +1951,8 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)
decoder->VaDisplay = VaDisplay;
#endif
decoder->Window = VideoWindow;
//decoder->VideoX = 0; // done by calloc
//decoder->VideoY = 0;
// decoder->VideoX = 0; // done by calloc
// decoder->VideoY = 0;
decoder->VideoWidth = VideoWindowWidth;
decoder->VideoHeight = VideoWindowHeight;
@ -2049,7 +2048,7 @@ static void CuvidDelHwDecoder(CuvidDecoder * decoder)
if (decoder == CuvidDecoders[0])
VideoThreadUnlock();
// glXMakeCurrent(XlibDisplay, None, NULL);
// glXMakeCurrent(XlibDisplay, None, NULL);
for (i = 0; i < CuvidDecoderN; ++i) {
if (CuvidDecoders[i] == decoder) {
CuvidDecoders[i] = NULL;
@ -2057,7 +2056,7 @@ static void CuvidDelHwDecoder(CuvidDecoder * decoder)
if (i < --CuvidDecoderN) {
CuvidDecoders[i] = CuvidDecoders[CuvidDecoderN];
}
// CuvidCleanup(decoder);
// CuvidCleanup(decoder);
CuvidPrintFrames(decoder);
#ifdef CUVID
if (decoder->cuda_ctx && CuvidDecoderN == 1) {
@ -2143,7 +2142,7 @@ void generateCUDAImage(CuvidDecoder * decoder, int index, const AVFrame * frame,
{
int n;
for (n = 0; n < 2; n++) { //
for (n = 0; n < 2; n++) {
// widthInBytes must account for the chroma plane
// elements being two samples wide.
CUDA_MEMCPY2D cpy = {
@ -2172,7 +2171,7 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
struct pl_image *img;
struct pl_plane *pl;
//printf("Create textures and planes %d %d\n",size_x,size_y);
// printf("Create textures and planes %d %d\n",size_x,size_y);
Debug(3, "video/vulkan: create %d Textures Format %s w %d h %d \n", anz,
PixFmt == AV_PIX_FMT_NV12 ? "NV12" : "P010", size_x, size_y);
@ -2192,11 +2191,11 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
size = 2;
}
if (decoder->pl_images[i].planes[n].texture) {
//#ifdef VAAPI
// #ifdef VAAPI
if (decoder->pl_images[i].planes[n].texture->params.shared_mem.handle.fd) {
close(decoder->pl_images[i].planes[n].texture->params.shared_mem.handle.fd);
}
//#endif
// #endif
pl_tex_destroy(p->gpu, &decoder->pl_images[i].planes[n].texture); // delete old texture
}
@ -2335,7 +2334,7 @@ void generateVAAPIImage(CuvidDecoder * decoder, int index, const AVFrame * frame
},
};
//printf("vor create Object %d with fd %d import size %u offset %d %dx%d\n",id,fd,size,offset, tex_params.w,tex_params.h);
// printf("vor create Object %d with fd %d import size %u offset %d %dx%d\n",id,fd,size,offset, tex_params.w,tex_params.h);
if (decoder->pl_images[index].planes[n].texture) {
pl_tex_destroy(p->gpu, &decoder->pl_images[index].planes[n].texture);
@ -2409,21 +2408,21 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
#ifdef VAAPI
#define MP_ARRAY_SIZE(s) (sizeof(s) / sizeof((s)[0]))
#define ADD_ATTRIB(name, value) \
do { \
assert(num_attribs + 3 < MP_ARRAY_SIZE(attribs)); \
attribs[num_attribs++] = (name); \
attribs[num_attribs++] = (value); \
attribs[num_attribs] = EGL_NONE; \
#define ADD_ATTRIB(name, value) \
do { \
assert(num_attribs + 3 < MP_ARRAY_SIZE(attribs)); \
attribs[num_attribs++] = (name); \
attribs[num_attribs++] = (value); \
attribs[num_attribs] = EGL_NONE; \
} while(0)
#define ADD_PLANE_ATTRIBS(plane) do { \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _FD_EXT, \
desc.objects[desc.layers[n].object_index[plane]].fd); \
desc.objects[desc.layers[n].object_index[plane]].fd); \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _OFFSET_EXT, \
desc.layers[n].offset[plane]); \
desc.layers[n].offset[plane]); \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _PITCH_EXT, \
desc.layers[n].pitch[plane]); \
desc.layers[n].pitch[plane]); \
} while (0)
void generateVAAPIImage(CuvidDecoder * decoder, int index, const AVFrame * frame, int image_width, int image_height)
@ -2528,12 +2527,12 @@ int push_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
}
//printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
// printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
/* pull filtered frames from the filtergraph */
while ((ret = av_buffersink_get_frame(decoder->buffersink_ctx, filt_frame)) >= 0) {
filt_frame->pts /= 2;
decoder->Interlaced = 0;
// printf("vaapideint video:new %#012" PRIx64 " old %#012" PRIx64 "\n",filt_frame->pts,frame->pts);
// printf("vaapideint video:new %#012" PRIx64 " old %#012" PRIx64 "\n",filt_frame->pts,frame->pts);
CuvidSyncRenderFrame(decoder, dec_ctx, filt_frame);
filt_frame = av_frame_alloc(); // get new frame
@ -2592,7 +2591,7 @@ int init_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
src_params->frame_rate.den = 1;
src_params->sample_aspect_ratio = dec_ctx->sample_aspect_ratio;
//printf("width %d height %d hw_frames_ctx %p\n",dec_ctx->width,dec_ctx->height ,frame->hw_frames_ctx);
// printf("width %d height %d hw_frames_ctx %p\n",dec_ctx->width,dec_ctx->height ,frame->hw_frames_ctx);
ret = av_buffersrc_parameters_set(decoder->buffersrc_ctx, src_params);
if (ret < 0) {
Debug(3, "Cannot set hw_frames_ctx to src\n");
@ -2714,7 +2713,7 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex
ist->GetFormatDone = 1;
Debug(3, "video: create decoder 16bit?=%d %dx%d old %d %d\n", bitformat16, video_ctx->width, video_ctx->height,
Debug(3, "video: create decoder 16bit?=%d %dx%d old %d %d\n", bitformat16, video_ctx->width, video_ctx->height,
decoder->InputWidth, decoder->InputHeight);
if (*fmt_idx == PIXEL_FORMAT) { // HWACCEL used
@ -2729,8 +2728,8 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex
ist->hwaccel_output_format = AV_PIX_FMT_NV12;
}
// if ((video_ctx->width != decoder->InputWidth
// || video_ctx->height != decoder->InputHeight) && decoder->TrickSpeed == 0) {
// if ((video_ctx->width != decoder->InputWidth
// || video_ctx->height != decoder->InputHeight) && decoder->TrickSpeed == 0) {
if (decoder->TrickSpeed == 0) {
#ifdef PLACEBO
@ -2812,7 +2811,7 @@ int get_RGB(CuvidDecoder * decoder)
#ifndef PLACEBO
// eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
// eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
glGenTextures(1, &texture);
GlxCheck();
glBindTexture(GL_TEXTURE_2D, texture);
@ -2860,7 +2859,7 @@ int get_RGB(CuvidDecoder * decoder)
GLint texLoc;
#ifdef CUVID
// glXMakeCurrent(XlibDisplay, VideoWindow, glxSharedContext);
// glXMakeCurrent(XlibDisplay, VideoWindow, glxSharedContext);
GlxCheck();
#endif
glEnable(GL_BLEND);
@ -2884,9 +2883,9 @@ int get_RGB(CuvidDecoder * decoder)
glUseProgram(0);
glActiveTexture(GL_TEXTURE0);
#ifdef CUVID
// glXMakeCurrent(XlibDisplay, VideoWindow, glxThreadContext);
// glXMakeCurrent(XlibDisplay, VideoWindow, glxThreadContext);
#else
// eglMakeCurrent(eglDisplay, eglSurface,eglSurface, eglThreadContext);
// eglMakeCurrent(eglDisplay, eglSurface,eglSurface, eglThreadContext);
#endif
}
glFlush();
@ -2992,9 +2991,9 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
if (decoder == NULL) // no video active
return NULL;
// surface = CuvidSurfacesRb[CuvidOutputSurfaceIndex];
// surface = CuvidSurfacesRb[CuvidOutputSurfaceIndex];
// get real surface size
// get real surface size
#ifdef PLACEBO
width = decoder->VideoWidth;
height = decoder->VideoHeight;
@ -3003,7 +3002,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
height = decoder->InputHeight;
#endif
// Debug(3, "video/cuvid: grab %dx%d\n", width, height);
// Debug(3, "video/cuvid: grab %dx%d\n", width, height);
source_rect.x0 = 0;
source_rect.y0 = 0;
@ -3036,7 +3035,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
}
}
// printf("video/cuvid: grab source dim %dx%d\n", width, height);
// printf("video/cuvid: grab source dim %dx%d\n", width, height);
size = width * height * sizeof(uint32_t);
@ -3057,7 +3056,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
while (decoder->grab) {
usleep(1000); // wait for data
}
// Debug(3,"got grab data\n");
// Debug(3,"got grab data\n");
if (ret_size) {
*ret_size = size;
@ -3120,7 +3119,7 @@ static void CuvidQueueVideoSurface(CuvidDecoder * decoder, int surface, int soft
return;
}
//
// Check and release, old surface
// Check and release, old surface
//
if ((old = decoder->SurfacesRb[decoder->SurfaceWrite]) != -1) {
// now we can release the surface, software surfaces only
@ -3172,7 +3171,7 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
Debug(3, "video/vdpau: aspect ratio changed\n");
decoder->InputAspect = frame->sample_aspect_ratio;
//printf("new aspect %d:%d\n",frame->sample_aspect_ratio.num,frame->sample_aspect_ratio.den);
// printf("new aspect %d:%d\n",frame->sample_aspect_ratio.num,frame->sample_aspect_ratio.den);
CuvidUpdateOutput(decoder);
}
@ -3185,9 +3184,9 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
//
if ( // decoder->PixFmt != video_ctx->pix_fmt
video_ctx->width != decoder->InputWidth
// || decoder->ColorSpace != color
// || decoder->ColorSpace != color
|| video_ctx->height != decoder->InputHeight) {
//Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->colorspace ,video_ctx->width, decoder->InputWidth,video_ctx->height, decoder->InputHeight);
// Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->colorspace ,video_ctx->width, decoder->InputWidth,video_ctx->height, decoder->InputHeight);
decoder->InputWidth = video_ctx->width;
decoder->InputHeight = video_ctx->height;
@ -3236,7 +3235,7 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
output = av_frame_alloc();
av_hwframe_transfer_data(output, frame, 0);
av_frame_copy_props(output, frame);
// printf("Save Surface ID %d %p %p\n",surface,decoder->pl_images[surface].planes[0].texture,decoder->pl_images[surface].planes[1].texture);
// printf("Save Surface ID %d %p %p\n",surface,decoder->pl_images[surface].planes[0].texture,decoder->pl_images[surface].planes[1].texture);
bool ok = pl_tex_upload(p->gpu, &(struct pl_tex_transfer_params) {
.tex = decoder->pl_images[surface].planes[0].texture,
.stride_w = output->linesize[0],
@ -3287,7 +3286,7 @@ static void *CuvidGetHwAccelContext(CuvidDecoder * decoder)
unsigned int version;
Debug(3, "Initializing cuvid hwaccel thread ID:%ld\n", (long int)syscall(186));
//turn NULL;
// turn NULL;
#ifdef CUVID
if (decoder->cuda_ctx) {
Debug(3, "schon passiert\n");
@ -3345,9 +3344,9 @@ static void CuvidAdvanceDecoderFrame(CuvidDecoder * decoder)
// keep use of last surface
++decoder->FramesDuped;
// FIXME: don't warn after stream start, don't warn during pause
// printf("video: display buffer empty, duping frame (%d/%d) %d\n",
// decoder->FramesDuped, decoder->FrameCounter,
// VideoGetBuffers(decoder->Stream));
// printf("video: display buffer empty, duping frame (%d/%d) %d\n",
// decoder->FramesDuped, decoder->FrameCounter,
// VideoGetBuffers(decoder->Stream));
return;
}
@ -3470,10 +3469,10 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
case AVCOL_SPC_UNSPECIFIED: // comes with UHD
img->repr.sys = PL_COLOR_SYSTEM_BT_709;
memcpy(&img->color, &pl_color_space_bt709, sizeof(struct pl_color_space));
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_SCENE_709_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_SCENE_709_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
break;
case AVCOL_SPC_BT2020_NCL:
@ -3485,22 +3484,22 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
#ifdef VAAPI
render_params.peak_detect_params = NULL;
#endif
// img->color.primaries = PL_COLOR_PRIM_BT_2020;
// img->color.transfer = PL_COLOR_TRC_HLG;
// img->color.light = PL_COLOR_LIGHT_SCENE_HLG;
// img->color.primaries = PL_COLOR_PRIM_BT_2020;
// img->color.transfer = PL_COLOR_TRC_HLG;
// img->color.light = PL_COLOR_LIGHT_SCENE_HLG;
break;
default: // fallback
img->repr.sys = PL_COLOR_SYSTEM_BT_709;
memcpy(&img->color, &pl_color_space_bt709, sizeof(struct pl_color_space));
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
break;
}
// Source crop
if (VideoScalerTest) { // right side defined scaler
// pl_tex_clear(p->gpu,target->fbo,(float[4]){0}); // clear frame
// pl_tex_clear(p->gpu,target->fbo,(float[4]){0}); // clear frame
img->src_rect.x0 = video_src_rect.x1 / 2 + 1;
img->src_rect.y0 = video_src_rect.y0;
img->src_rect.x1 = video_src_rect.x1;
@ -3551,7 +3550,7 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
render_params.cone_params = NULL;
}
// render_params.upscaler = &pl_filter_ewa_lanczos;
// render_params.upscaler = &pl_filter_ewa_lanczos;
render_params.upscaler = pl_named_filters[VideoScaling[decoder->Resolution]].filter;
render_params.downscaler = pl_named_filters[VideoScaling[decoder->Resolution]].filter;
@ -3722,10 +3721,10 @@ static void CuvidDisplayFrame(void)
#if 1
diff = (GetusTicks() - last_time) / 1000;
// last_time = GetusTicks();
//printf("Roundtrip Displayframe %d\n",diff);
// last_time = GetusTicks();
// printf("Roundtrip Displayframe %d\n",diff);
if (diff < 15000 && diff > 0) {
//printf("Sleep %d\n",15000-diff);
// printf("Sleep %d\n",15000-diff);
usleep((15000 - diff)); // * 1000);
}
@ -3733,17 +3732,17 @@ static void CuvidDisplayFrame(void)
if (!p->swapchain)
return;
//last_time = GetusTicks();
// last_time = GetusTicks();
#ifdef CUVID
//first_time = GetusTicks();
// first_time = GetusTicks();
VideoThreadLock();
if (!first) {
// last_time = GetusTicks();
// last_time = GetusTicks();
if (!pl_swapchain_submit_frame(p->swapchain))
Error(_("Failed to submit swapchain buffer\n"));
pl_swapchain_swap_buffers(p->swapchain); // swap buffers
// printf("submit and swap %d\n",(GetusTicks()-last_time)/1000000);
// printf("submit and swap %d\n",(GetusTicks()-last_time)/1000000);
}
#endif
@ -3782,9 +3781,9 @@ static void CuvidDisplayFrame(void)
else
target.repr.levels = PL_COLOR_LEVELS_TV;
target.repr.alpha = PL_ALPHA_UNKNOWN;
// target.repr.bits.sample_depth = 16;
// target.repr.bits.color_depth = 16;
// target.repr.bits.bit_shift =0;
// target.repr.bits.sample_depth = 16;
// target.repr.bits.color_depth = 16;
// target.repr.bits.bit_shift =0;
switch (VulkanTargetColorSpace) {
case 0:
@ -3877,7 +3876,7 @@ static void CuvidDisplayFrame(void)
GlxCheck();
#endif
#ifdef VAAPI
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, OSDcontext);
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, OSDcontext);
#endif
glEnable(GL_BLEND);
GlxCheck();
@ -3905,21 +3904,21 @@ static void CuvidDisplayFrame(void)
glXMakeCurrent(XlibDisplay, VideoWindow, glxThreadContext);
#endif
#ifdef VAAPI
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglThreadContext);
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglThreadContext);
#endif
}
#endif
#ifdef PLACEBO
#ifdef VAAPI
// first_time = GetusTicks();
// first_time = GetusTicks();
if (!pl_swapchain_submit_frame(p->swapchain))
Fatal(_("Failed to submit swapchain buffer\n"));
pl_swapchain_swap_buffers(p->swapchain); // swap buffers
// printf("submit and swap %d us\n",(GetusTicks()-first_time)/1000);
// printf("submit and swap %d us\n",(GetusTicks()-first_time)/1000);
#endif
VideoThreadUnlock();
// printf("Display time %d\n",(GetusTicks()-first_time)/1000000);
// printf("Display time %d\n",(GetusTicks()-first_time)/1000000);
#else
#ifdef CUVID
glXGetVideoSyncSGI(&Count); // get current frame
@ -4053,7 +4052,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
int err = 0;
static uint64_t last_time;
// video_clock = CuvidGetClock(decoder);
// video_clock = CuvidGetClock(decoder);
video_clock = decoder->PTS - (90 * 20 * 1); // 1 Frame in Output
filled = atomic_read(&decoder->SurfacesFilled);
@ -4063,7 +4062,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
goto skip_sync;
}
audio_clock = AudioGetClock();
//printf("Diff %d %ld %ld filled %d \n",(video_clock - audio_clock - VideoAudioDelay)/90,video_clock,audio_clock,filled);
// printf("Diff %d %ld %ld filled %d \n",(video_clock - audio_clock - VideoAudioDelay)/90,video_clock,audio_clock,filled);
// 60Hz: repeat every 5th field
if (Video60HzMode && !(decoder->FramesDisplayed % 6)) {
if (audio_clock == (int64_t) AV_NOPTS_VALUE || video_clock == (int64_t) AV_NOPTS_VALUE) {
@ -4105,19 +4104,19 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
diff = (decoder->LastAVDiff + diff) / 2;
decoder->LastAVDiff = diff;
// if (CuvidDecoderN) {
// CuvidDecoders[0]->Frameproc = (float)(diff / 90);
// }
// if (CuvidDecoderN) {
// CuvidDecoders[0]->Frameproc = (float)(diff / 90);
// }
#if 0
if (abs(diff / 90) > 0) {
printf(" Diff %d filled %d \n", diff / 90, filled);
printf(" Diff %d filled %d \n", diff / 90, filled);
}
#endif
if (abs(diff) > 5000 * 90) { // more than 5s
err = CuvidMessage(2, "video: audio/video difference too big\n");
// decoder->SyncCounter = 1;
// usleep(10);
// goto out;
// decoder->SyncCounter = 1;
// usleep(10);
// goto out;
} else if (diff > 100 * 90) {
// FIXME: this quicker sync step, did not work with new code!
err = CuvidMessage(4, "video: slow down video, duping frame %d\n", diff / 90);
@ -4154,8 +4153,6 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
skip_sync:
// check if next field is available
//JOJO if (decoder->SurfaceField && filled <= 1 + 2 * decoder->Interlaced) {
if (decoder->SurfaceField && filled <= 1 + 2 * decoder->Interlaced) {
if (filled < 1 + 2 * decoder->Interlaced) {
++decoder->FramesDuped;
@ -4171,8 +4168,8 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
}
#endif
}
// Debug(3,"filled zu klein %d Field %d Interlaced %d\n",filled,decoder->SurfaceField,decoder->Interlaced);
// goto out;
// Debug(3,"filled zu klein %d Field %d Interlaced %d\n",filled,decoder->SurfaceField,decoder->Interlaced);
// goto out;
}
CuvidAdvanceDecoderFrame(decoder);
@ -4252,8 +4249,8 @@ static void CuvidSyncRenderFrame(CuvidDecoder * decoder, const AVCodecContext *
}
// if (!decoder->Closing) {
// VideoSetPts(&decoder->PTS, decoder->Interlaced, video_ctx, frame);
// }
// VideoSetPts(&decoder->PTS, decoder->Interlaced, video_ctx, frame);
// }
CuvidRenderFrame(decoder, video_ctx, frame);
}
@ -4320,7 +4317,7 @@ static void CuvidDisplayHandlerThread(void)
// fill frame output ring buffer
//
filled = atomic_read(&decoder->SurfacesFilled);
//if (filled <= 1 + 2 * decoder->Interlaced) {
//if (filled <= 1 + 2 * decoder->Interlaced) {
if (filled < 4) {
// FIXME: hot polling
// fetch+decode or reopen
@ -4436,14 +4433,14 @@ static const VideoModule CuvidModule = {
.SetVideoMode = CuvidSetVideoMode,
.DisplayHandlerThread = CuvidDisplayHandlerThread,
// .OsdClear = GlxOsdClear,
// .OsdDrawARGB = GlxOsdDrawARGB,
// .OsdInit = GlxOsdInit,
// .OsdExit = GlxOsdExit,
// .OsdClear = CuvidOsdClear,
// .OsdDrawARGB = CuvidOsdDrawARGB,
// .OsdInit = CuvidOsdInit,
// .OsdExit = CuvidOsdExit,
// .OsdClear = GlxOsdClear,
// .OsdDrawARGB = GlxOsdDrawARGB,
// .OsdInit = GlxOsdInit,
// .OsdExit = GlxOsdExit,
// .OsdClear = CuvidOsdClear,
// .OsdDrawARGB = CuvidOsdDrawARGB,
// .OsdInit = CuvidOsdInit,
// .OsdExit = CuvidOsdExit,
.Exit = CuvidExit,
.Init = CuvidGlxInit,
};
@ -4560,7 +4557,7 @@ static void NoopDisplayHandlerThread(void)
#else
#define NoopDisplayHandlerThread NULL
#define NoopDisplayHandlerThread NULL
#endif
@ -4630,7 +4627,7 @@ void VideoOsdClear(void)
OsdShown = 0;
#else
VideoThreadLock();
// VideoUsedModule->OsdClear();
// VideoUsedModule->OsdClear();
OsdDirtyX = OsdWidth; // reset dirty area
OsdDirtyY = OsdHeight;
OsdDirtyWidth = 0;
@ -4829,20 +4826,20 @@ static void VideoEvent(void)
case MapNotify:
Debug(3, "video/event: MapNotify\n");
// �wm workaround
// wm workaround
VideoThreadLock();
xcb_change_window_attributes(Connection, VideoWindow, XCB_CW_CURSOR, &VideoBlankCursor);
VideoThreadUnlock();
VideoBlankTick = 0;
break;
case Expose:
//Debug(3, "video/event: Expose\n");
// Debug(3, "video/event: Expose\n");
break;
case ReparentNotify:
Debug(3, "video/event: ReparentNotify\n");
break;
case ConfigureNotify:
//Debug(3, "video/event: ConfigureNotify\n");
// Debug(3, "video/event: ConfigureNotify\n");
VideoSetVideoMode(event.xconfigure.x, event.xconfigure.y, event.xconfigure.width, event.xconfigure.height);
break;
case ButtonPress:
@ -4966,7 +4963,7 @@ void delete_placebo()
vkDestroySurfaceKHR(p->vk_inst->instance, p->pSurface, NULL);
pl_vk_inst_destroy(&p->vk_inst);
// pl_vulkan_destroy(&p->vk);
// pl_vulkan_destroy(&p->vk);
pl_context_destroy(&p->ctx);
free(p);
p = NULL;
@ -4999,7 +4996,7 @@ void InitPlacebo()
// create Vulkan instance
memcpy(&iparams, &pl_vk_inst_default_params, sizeof(iparams));
// iparams.debug = true;
// iparams.debug = true;
iparams.num_extensions = 2;
iparams.extensions = malloc(2 * sizeof(const char *));
*iparams.extensions = surfext;
@ -5130,10 +5127,10 @@ static void *VideoHandlerThread(void *dummy)
VideoPollEvent();
// first_time = GetusTicks();
// first_time = GetusTicks();
CuvidSyncDisplayFrame();
// printf("syncdisplayframe exec %d\n",(GetusTicks()-first_time)/1000);
// printf("syncdisplayframe exec %d\n",(GetusTicks()-first_time)/1000);
}
pthread_cleanup_pop(NULL);
@ -5290,9 +5287,9 @@ void VideoDelHwDecoder(VideoHwDecoder * hw_decoder)
}
#endif
// only called from inside the thread
//VideoThreadLock();
// VideoThreadLock();
VideoUsedModule->DelHwDecoder(hw_decoder);
//VideoThreadUnlock();
// VideoThreadUnlock();
}
}
@ -5573,7 +5570,7 @@ uint8_t *VideoGrab(int *size, int *width, int *height, int write_header)
///
uint8_t *VideoGrabService(int *size, int *width, int *height)
{
// Debug(3, "video: grab service\n");
// Debug(3, "video: grab service\n");
#ifdef USE_GRAB
if (VideoUsedModule->GrabOutput) {
@ -5746,11 +5743,11 @@ static void X11DPMSReenable(xcb_connection_t * connection)
#else
/// dummy function: Suspend X11 screen saver.
/// dummy function: Suspend X11 screen saver.
#define X11SuspendScreenSaver(connection, suspend)
/// dummy function: Disable X11 DPMS.
/// dummy function: Disable X11 DPMS.
#define X11DPMSDisable(connection)
/// dummy function: Reenable X11 DPMS.
/// dummy function: Reenable X11 DPMS.
#define X11DPMSReenable(connection)
#endif
@ -6456,7 +6453,7 @@ void VideoInit(const char *display_name)
return;
}
// prefetch extensions
//xcb_prefetch_extension_data(Connection, &xcb_big_requests_id);
// xcb_prefetch_extension_data(Connection, &xcb_big_requests_id);
#ifdef xcb_USE_GLX
xcb_prefetch_extension_data(Connection, &xcb_glx_id);
#endif
@ -6575,7 +6572,7 @@ void VideoExit(void)
//
// FIXME: cleanup.
//
//RandrExit();
// RandrExit();
//
// X11/xcb cleanup

128
video.h
View File

@ -30,10 +30,10 @@
// Typedefs
//----------------------------------------------------------------------------
/// Video hardware decoder typedef
/// Video hardware decoder typedef
typedef struct _video_hw_decoder_ VideoHwDecoder;
/// Video output stream typedef
/// Video output stream typedef
typedef struct __video_stream__ VideoStream;
//----------------------------------------------------------------------------
@ -49,179 +49,179 @@ extern char ConfigStartX11Server; ///< flag start the x11 server
// Prototypes
//----------------------------------------------------------------------------
/// Allocate new video hardware decoder.
/// Allocate new video hardware decoder.
extern VideoHwDecoder *VideoNewHwDecoder(VideoStream *);
/// Deallocate video hardware decoder.
/// Deallocate video hardware decoder.
extern void VideoDelHwDecoder(VideoHwDecoder *);
/// Get and allocate a video hardware surface.
/// Get and allocate a video hardware surface.
extern unsigned VideoGetSurface(VideoHwDecoder *, const AVCodecContext *);
/// Release a video hardware surface
/// Release a video hardware surface
extern void VideoReleaseSurface(VideoHwDecoder *, unsigned);
/// Callback to negotiate the PixelFormat.
/// Callback to negotiate the PixelFormat.
extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *, const enum AVPixelFormat *);
/// Render a ffmpeg frame.
/// Render a ffmpeg frame.
extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *, const AVFrame *);
/// Get hwaccel context for ffmpeg.
/// Get hwaccel context for ffmpeg.
extern void *VideoGetHwAccelContext(VideoHwDecoder *);
#ifdef AVCODEC_VDPAU_H
/// Draw vdpau render state.
/// Draw vdpau render state.
extern void VideoDrawRenderState(VideoHwDecoder *, struct vdpau_render_state *);
#endif
#ifdef USE_OPENGLOSD
/// Set callback function to notify VDR about VideoEvents
/// Set callback function to notify VDR about VideoEvents
extern void VideoSetVideoEventCallback(void (*)(void));
#endif
/// Poll video events.
/// Poll video events.
extern void VideoPollEvent(void);
/// Wakeup display handler.
/// Wakeup display handler.
extern void VideoDisplayWakeup(void);
/// Set video device.
/// Set video device.
extern void VideoSetDevice(const char *);
/// Get video driver name.
/// Get video driver name.
extern const char *VideoGetDriverName(void);
/// Set video geometry.
/// Set video geometry.
extern int VideoSetGeometry(const char *);
/// Set 60Hz display mode.
/// Set 60Hz display mode.
extern void VideoSet60HzMode(int);
/// Set soft start audio/video sync.
/// Set soft start audio/video sync.
extern void VideoSetSoftStartSync(int);
/// Set show black picture during channel switch.
/// Set show black picture during channel switch.
extern void VideoSetBlackPicture(int);
/// Set brightness adjustment.
/// Set brightness adjustment.
extern void VideoSetBrightness(int);
/// Set contrast adjustment.
/// Set contrast adjustment.
extern void VideoSetContrast(int);
/// Set saturation adjustment.
/// Set saturation adjustment.
extern void VideoSetSaturation(int);
/// Set Gamm.
/// Set Gamma.
extern void VideoSetGamma(int);
/// Set ColorSpace.
/// Set ColorSpace.
extern void VideoSetTargetColor(int);
/// Set hue adjustment.
/// Set hue adjustment.
extern void VideoSetHue(int);
/// Set Color Blindness.
/// Set Color Blindness.
extern void VideoSetColorBlindness(int);
/// Set Color Blindness Factor
/// Set Color Blindness Factor
extern void VideoSetColorBlindnessFaktor(int);
/// Set video output position.
/// Set video output position.
extern void VideoSetOutputPosition(VideoHwDecoder *, int, int, int, int);
/// Set video mode.
/// Set video mode.
extern void VideoSetVideoMode(int, int, int, int);
/// Set 4:3 display format.
/// Set 4:3 display format.
extern void VideoSet4to3DisplayFormat(int);
/// Set other display format.
/// Set other display format.
extern void VideoSetOtherDisplayFormat(int);
/// Set video fullscreen mode.
/// Set video fullscreen mode.
extern void VideoSetFullscreen(int);
/// Set deinterlace.
/// Set deinterlace.
extern void VideoSetDeinterlace(int[]);
/// Set skip chroma deinterlace.
/// Set skip chroma deinterlace.
extern void VideoSetSkipChromaDeinterlace(int[]);
/// Set inverse telecine.
/// Set inverse telecine.
extern void VideoSetInverseTelecine(int[]);
/// Set scaling.
/// Set scaling.
extern void VideoSetScaling(int[]);
/// Set scaler test.
/// Set scaler test.
extern void VideoSetScalerTest(int);
/// Set denoise.
/// Set denoise.
extern void VideoSetDenoise(int[]);
/// Set sharpen.
/// Set sharpen.
extern void VideoSetSharpen(int[]);
/// Set cut top and bottom.
/// Set cut top and bottom.
extern void VideoSetCutTopBottom(int[]);
/// Set cut left and right.
/// Set cut left and right.
extern void VideoSetCutLeftRight(int[]);
/// Set studio levels.
/// Set studio levels.
extern void VideoSetStudioLevels(int);
/// Set background.
/// Set background.
extern void VideoSetBackground(uint32_t);
/// Set audio delay.
/// Set audio delay.
extern void VideoSetAudioDelay(int);
/// Set auto-crop parameters.
/// Set auto-crop parameters.
extern void VideoSetAutoCrop(int, int, int);
/// Clear OSD.
/// Clear OSD.
extern void VideoOsdClear(void);
/// Draw an OSD ARGB image.
/// Draw an OSD ARGB image.
extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);
/// Get OSD size.
/// Get OSD size.
extern void VideoGetOsdSize(int *, int *);
/// Set OSD size.
/// Set OSD size.
extern void VideoSetOsdSize(int, int);
/// Set Osd 3D Mode
/// Set Osd 3D Mode
extern void VideoSetOsd3DMode(int);
/// Set video clock.
/// Set video clock.
extern void VideoSetClock(VideoHwDecoder *, int64_t);
/// Get video clock.
/// Get video clock.
extern int64_t VideoGetClock(const VideoHwDecoder *);
/// Set closing flag.
/// Set closing flag.
extern void VideoSetClosing(VideoHwDecoder *);
/// Reset start of frame counter
/// Reset start of frame counter
extern void VideoResetStart(VideoHwDecoder *);
/// Set trick play speed.
/// Set trick play speed.
extern void VideoSetTrickSpeed(VideoHwDecoder *, int);
/// Grab screen.
/// Grab screen.
extern uint8_t *VideoGrab(int *, int *, int *, int);
/// Grab screen raw.
/// Grab screen raw.
extern uint8_t *VideoGrabService(int *, int *, int *);
/// Get decoder statistics.
/// Get decoder statistics.
extern void VideoGetStats(VideoHwDecoder *, int *, int *, int *, int *, float *);
/// Get video stream size
/// Get video stream size
extern void VideoGetVideoSize(VideoHwDecoder *, int *, int *, int *, int *);
extern void VideoOsdInit(void); ///< Setup osd.
@ -230,19 +230,19 @@ extern void VideoOsdExit(void); ///< Cleanup osd.
extern void VideoInit(const char *); ///< Setup video module.
extern void VideoExit(void); ///< Cleanup and exit video module.
/// Poll video input buffers.
/// Poll video input buffers.
extern int VideoPollInput(VideoStream *);
/// Decode video input buffers.
/// Decode video input buffers.
extern int VideoDecodeInput(VideoStream *);
/// Get number of input buffers.
/// Get number of input buffers.
extern int VideoGetBuffers(const VideoStream *);
/// Set DPMS at Blackscreen switch
/// Set DPMS at Blackscreen switch
extern void SetDPMSatBlackScreen(int);
/// Raise the frontend window
/// Raise the frontend window
extern int VideoRaiseWindow(void);
#ifdef USE_OPENGLOSD