Mirror of https://github.com/jojo61/vdr-plugin-softhdcuvid.git, synced 2023-10-10 13:37:41 +02:00

Commit 3dfaeaf7e2: Merge pull request #16 from dnehring7/master
Fix remaining indentation problems.

README.md | 12
@@ -5,7 +5,7 @@ Copyright (c) 2018 by jojo61. All Rights Reserved.

Contributor(s):

jojo61

License: AGPLv3

@@ -31,13 +31,13 @@ A software and GPU emulated UHD output device plugin for VDR.
o HDMI/SPDIF pass-through
o Software volume, compression, normalize and channel resample
o VDR ScaleVideo API
o CUDA deinterlacer
o Autocrop
o Suspend / Detach
o PIP (Picture-in-Picture) (not working yet)


To compile you must have the packages listed under 'Requires' installed.


This is a fork of johns' original softhddevice work, reworked to support HEVC with CUDA and OpenGL output.
@@ -52,7 +52,7 @@ You need libplacebo.
It is still beta and I tested it with Intel VAAPI. If you have problems with the shaders, copy the drirc file into your home directory as .drirc.
AMD VAAPI is broken by AMD and will not work currently: vaapi_deinterlace is broken and the amdgpu driver is unstable. I have not tested with amdgpupro.

You have to adapt the Makefile to your needs. I use FFmpeg 4.0.
The Makefile expects the CUDA SDK in /usr/local/cuda. Currently it is tested with CUDA 10.

Unfortunately, older FFmpeg versions have a bug with deinterlacing CUDA frames. It is best to get the latest FFmpeg version.
@@ -65,7 +65,7 @@ old:
new:
ctx->frame_queue = av_fifo_alloc((ctx->nb_surfaces + 2) * sizeof(CuvidParsedFrame));

This version supports building with libplacebo: https://github.com/haasn/libplacebo
You have to enable it in the Makefile and install libplacebo yourself.
At the moment this is work in progress.
It also needs NVIDIA driver 410.48 or newer as well as CUDA 10.
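For context, the av_fifo_alloc() line quoted in the hunk above is the patched version of the parsed-frame FIFO allocation in FFmpeg's CUVID decoder. The file and function placement (libavcodec/cuviddec.c, cuvid_decode_init()) and the surrounding error handling shown here are assumptions from a recent FFmpeg tree, not part of this README; only the allocation line itself is quoted from the text above.

/* Sketch: the parsed-frame FIFO is sized from the number of decode surfaces;
 * the patch enlarges it by two extra slots so the CUDA deinterlacer has
 * headroom. Surrounding code is assumed, not verbatim. */
ctx->frame_queue = av_fifo_alloc((ctx->nb_surfaces + 2) * sizeof(CuvidParsedFrame));
if (!ctx->frame_queue) {
    ret = AVERROR(ENOMEM);
    goto error;
}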
@@ -74,7 +74,7 @@ In the settings you can enable a correction for Colorblindness. First you have t

Also you can enable a Scaler Test feature. When enabled, the screen is split: on the left half you will see the scaler defined by Scaler Test, and on the right half the scaler defined by the Resolution setting. There is a small black line between the halves to remind you that Scaler Test is active.

If your FFmpeg supports it, you can enable YADIF in the Makefile and select between the built-in NVIDIA CUDA deinterlacer and the YADIF CUDA deinterlacer.

Good luck
jojo61
audio.c | 26

@@ -87,7 +87,7 @@
#endif
#include <pthread.h>
#ifndef HAVE_PTHREAD_NAME
/// only available with newer glibc
#define pthread_setname_np(thread, name)
#endif
#endif
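The fallback macro above keeps calls to pthread_setname_np() compiling on older glibc by expanding them to nothing. A minimal usage sketch (assumed, not plugin code; the thread name string is made up):

#define _GNU_SOURCE
#include <pthread.h>

static void *worker(void *arg)
{
    /* names the thread on newer glibc; the plugin's fallback macro above
     * turns this call into a no-op where the name API is missing */
    pthread_setname_np(pthread_self(), "audio-worker");
    return arg;
}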
@@ -132,7 +132,7 @@ char AudioAlsaCloseOpenDelay; ///< enable alsa close/open delay fix

static const char *AudioModuleName; ///< which audio module to use

/// Selected audio module.
static const AudioModule *AudioUsedModule = &NoopModule;
static const char *AudioPCMDevice; ///< PCM device name
static const char *AudioPassthroughDevice; ///< Passthrough device name

@@ -174,30 +174,30 @@ static int AudioVolume; ///< current volume (0 .. 1000)

extern int VideoAudioDelay; ///< import audio/video delay

/// default ring buffer size ~2s 8ch 16bit (3 * 5 * 7 * 8)
static const unsigned AudioRingBufferSize = 3 * 5 * 7 * 8 * 2 * 1000;

static int AudioChannelsInHw[9]; ///< table which channels are supported
enum _audio_rates
{ ///< sample rates enumeration
// HW: 32000 44100 48000 88200 96000 176400 192000
-//Audio32000, ///< 32.0Khz
+// Audio32000, ///< 32.0Khz
Audio44100, ///< 44.1Khz
Audio48000, ///< 48.0Khz
-//Audio88200, ///< 88.2Khz
+// Audio88200, ///< 88.2Khz
-//Audio96000, ///< 96.0Khz
+// Audio96000, ///< 96.0Khz
-//Audio176400, ///< 176.4Khz
+// Audio176400, ///< 176.4Khz
Audio192000, ///< 192.0Khz
AudioRatesMax ///< max index
};

/// table which rates are supported
static int AudioRatesInHw[AudioRatesMax];

/// input to hardware channel matrix
static int AudioChannelMatrix[AudioRatesMax][9];

/// rates tables (must be sorted by frequency)
static const unsigned AudioRatesTable[AudioRatesMax] = {
44100, 48000, 192000
};
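To see why the AudioRingBufferSize constant above is documented as "~2s 8ch 16bit", here is a small worked check (a sketch, not plugin code; the 48 kHz reference rate is an assumption based on the comment):

#include <stdio.h>

int main(void)
{
    /* 3*5*7*8 = 840 is divisible by every channel count from 1 to 8,
     * so the buffer size is always a whole number of sample frames */
    const unsigned ring = 3 * 5 * 7 * 8 * 2 * 1000;  /* 1,680,000 bytes */
    const unsigned rate = 48000 * 8 * 2;             /* 48 kHz, 8 ch, 16 bit = 768,000 bytes/s */

    printf("ring buffer: %u bytes = %.2f s\n", ring, (double)ring / rate);  /* ~2.19 s */
    return 0;
}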
@@ -209,7 +209,7 @@ static const unsigned AudioRatesTable[AudioRatesMax] = {
static const int AudioNormSamples = 4096; ///< number of samples

#define AudioNormMaxIndex 128 ///< number of average values
/// average of n last sample blocks
static uint32_t AudioNormAverage[AudioNormMaxIndex];
static int AudioNormIndex; ///< index into average table
static int AudioNormReady; ///< index counter

@@ -1149,7 +1149,7 @@ static int64_t AlsaGetDelay(void)
}
// delay in frames in alsa + kernel buffers
if ((err = snd_pcm_delay(AlsaPCMHandle, &delay)) < 0) {
-//Debug(3, "audio/alsa: no hw delay\n");
+// Debug(3, "audio/alsa: no hw delay\n");
delay = 0L;
#ifdef DEBUG
} else if (snd_pcm_state(AlsaPCMHandle) != SND_PCM_STATE_RUNNING) {
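For reference, snd_pcm_delay() reports the delay in sample frames, so AlsaGetDelay() still has to turn it into a time value. A hedged sketch of that conversion (the helper name, the 90 kHz PTS base and the sample-rate parameter are assumptions for illustration, not the plugin's exact code):

#include <alsa/asoundlib.h>

static int64_t alsa_delay_90khz(snd_pcm_t *pcm, unsigned sample_rate)
{
    snd_pcm_sframes_t frames = 0;

    /* treat errors or negative values as "no hw delay", like the code above */
    if (snd_pcm_delay(pcm, &frames) < 0 || frames < 0)
        return 0;
    return (int64_t)frames * 90000 / sample_rate;   /* frames -> 90 kHz ticks */
}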
@@ -2285,7 +2285,7 @@ void AudioEnqueue(const void *samples, int count)
// forced start or enough video + audio buffered
// for some exotic channels * 4 too small
if (AudioStartThreshold * 10 < n || (AudioVideoIsReady
// if ((AudioVideoIsReady
&& AudioStartThreshold < n)) {
// restart play-back
// no lock needed, can wakeup next time
codec.c | 91

@@ -1,4 +1,3 @@

///
/// @file codec.c @brief Codec functions
///

@@ -31,15 +30,15 @@
/// many bugs and incompatiblity in it. Don't use this shit.
///

/// compile with pass-through support (stable, AC-3, E-AC-3 only)
#define USE_PASSTHROUGH
/// compile audio drift correction support (very experimental)
#define USE_AUDIO_DRIFT_CORRECTION
/// compile AC-3 audio drift correction support (very experimental)
#define USE_AC3_DRIFT_CORRECTION
/// use ffmpeg libswresample API (autodected, Makefile)
#define noUSE_SWRESAMPLE
/// use libav libavresample API (autodected, Makefile)
#define noUSE_AVRESAMPLE

#include <stdio.h>

@@ -89,15 +88,15 @@
// Global
//----------------------------------------------------------------------------

///
/// ffmpeg lock mutex
///
/// new ffmpeg dislikes simultanous open/close
/// this breaks our code, until this is fixed use lock.
///
static pthread_mutex_t CodecLockMutex;

/// Flag prefer fast channel switch
char CodecUsePossibleDefectFrames;
AVBufferRef *hw_device_ctx;

@@ -155,7 +154,7 @@ static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx, const enu

}

-//static void Codec_free_buffer(void *opaque, uint8_t *data);
+// static void Codec_free_buffer(void *opaque, uint8_t *data);

/**
** Video buffer management, get buffer for frame.

@@ -174,8 +173,8 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
if (!decoder->GetFormatDone) { // get_format missing
enum AVPixelFormat fmts[2];

// fprintf(stderr, "codec: buggy libav, use ffmpeg\n");
// Warning(_("codec: buggy libav, use ffmpeg\n"));
fmts[0] = video_ctx->pix_fmt;
fmts[1] = AV_PIX_FMT_NONE;
Codec_get_format(video_ctx, fmts);

@@ -183,11 +182,11 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
#if 0
if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt
|| AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt || AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
-//Debug(3,"hwaccel get_buffer\n");
+// Debug(3,"hwaccel get_buffer\n");
return decoder->hwaccel_get_buffer(video_ctx, frame, flags);
}
#endif
-//Debug(3, "codec: fallback to default get_buffer\n");
+// Debug(3, "codec: fallback to default get_buffer\n");
return avcodec_default_get_buffer2(video_ctx, frame, flags);
}

@@ -322,7 +321,7 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
#endif

#ifdef CUVID
if (strcmp(decoder->VideoCodec->long_name, "Nvidia CUVID MPEG2VIDEO decoder") == 0) { // deinterlace for mpeg2 is somehow broken
if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint, 0) < 0) { // adaptive
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option deint to video codec!\n"));

@@ -368,12 +367,12 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
//decoder->VideoCtx->debug = FF_DEBUG_STARTCODE;
//decoder->VideoCtx->err_recognition |= AV_EF_EXPLODE;

// av_log_set_level(AV_LOG_DEBUG);
av_log_set_level(0);

decoder->VideoCtx->get_format = Codec_get_format;
decoder->VideoCtx->get_buffer2 = Codec_get_buffer2;
// decoder->VideoCtx->active_thread_type = 0;
decoder->VideoCtx->draw_horiz_band = NULL;
decoder->VideoCtx->hwaccel_context = VideoGetHwAccelContext(decoder->HwDecoder);

@@ -403,7 +402,7 @@ void CodecVideoClose(VideoDecoder * video_decoder)
AVFrame *frame;

// FIXME: play buffered data
// av_frame_free(&video_decoder->Frame); // callee does checks

Debug(3, "CodecVideoClose\n");
if (video_decoder->VideoCtx) {

@@ -533,10 +532,10 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
pkt = avpkt; // use copy
got_frame = 0;

// printf("decode packet %d\n",(GetusTicks()-first_time)/1000000);
ret1 = avcodec_send_packet(video_ctx, pkt);

// first_time = GetusTicks();

if (ret1 >= 0) {
consumed = 1;

@@ -545,7 +544,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
if (!CuvidTestSurfaces())
usleep(1000);

-//printf("send packet to decode %s\n",consumed?"ok":"Full");
+// printf("send packet to decode %s\n",consumed?"ok":"Full");

if ((ret1 == AVERROR(EAGAIN) || ret1 == AVERROR_EOF || ret1 >= 0) && CuvidTestSurfaces()) {
ret = 0;

@@ -557,7 +556,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
} else {
got_frame = 0;
}
// printf("got %s packet from decoder\n",got_frame?"1":"no");
if (got_frame) { // frame completed
#ifdef YADIF
if (decoder->filter) {

@@ -572,24 +571,24 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
}
if (frame->interlaced_frame && decoder->filter == 2 && (frame->height != 720)) { // broken ZDF sends Interlaced flag
ret = push_filters(video_ctx, decoder->HwDecoder, frame);
// av_frame_unref(frame);
continue;
}
}
#endif
-//DisplayPts(video_ctx, frame);
+// DisplayPts(video_ctx, frame);
VideoRenderFrame(decoder->HwDecoder, video_ctx, frame);
// av_frame_unref(frame);
} else {
av_frame_free(&frame);
// printf("codec: got no frame %d send %d\n",ret,ret1);
}
}
if (!CuvidTestSurfaces()) {
usleep(1000);
}
} else {
// consumed = 1;
}

if (!consumed) {

@@ -694,9 +693,9 @@ static char CodecAudioDrift; ///< flag: enable audio-drift correction
static const int CodecAudioDrift = 0;
#endif
#ifdef USE_PASSTHROUGH
///
/// Pass-through flags: CodecPCM, CodecAC3, CodecEAC3, ...
///
static char CodecPassthrough;
#else
static const int CodecPassthrough = 0;

@@ -745,7 +744,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)

Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id));
if (!(audio_codec = avcodec_find_decoder(codec_id))) {
// if (!(audio_codec = avcodec_find_decoder(codec_id))) {
Fatal(_("codec: codec ID %#06x not found\n"), codec_id);
// FIXME: errors aren't fatal
}

@@ -765,9 +764,9 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)

av_dict = NULL;
// FIXME: import settings
-//av_dict_set(&av_dict, "dmix_mode", "0", 0);
+// av_dict_set(&av_dict, "dmix_mode", "0", 0);
-//av_dict_set(&av_dict, "ltrt_cmixlev", "1.414", 0);
+// av_dict_set(&av_dict, "ltrt_cmixlev", "1.414", 0);
-//av_dict_set(&av_dict, "loro_cmixlev", "1.414", 0);
+// av_dict_set(&av_dict, "loro_cmixlev", "1.414", 0);
if (avcodec_open2(audio_decoder->AudioCtx, audio_codec, &av_dict) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't open audio codec\n"));

@@ -1364,12 +1363,12 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
#if 0
ret = avcodec_decode_audio4(avctx, frame, &got_frame, avpkt);
#else
// SUGGESTION
// Now that avcodec_decode_audio4 is deprecated and replaced
// by 2 calls (receive frame and send packet), this could be optimized
// into separate routines or separate threads.
// Also now that it always consumes a whole buffer some code
// in the caller may be able to be optimized.
ret = avcodec_receive_frame(avctx, frame);
if (ret == 0)
got_frame = 1;
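The send/receive split that the SUGGESTION comment above describes looks roughly like this on the audio side. A minimal sketch of the FFmpeg API pattern, not the plugin's final code; error handling is trimmed and the helper name is made up:

#include <libavcodec/avcodec.h>

/* returns 1 when a decoded frame is available, 0 when more input is needed,
 * and a negative AVERROR code on real decode errors */
static int decode_audio_packet(AVCodecContext *avctx, const AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(avctx, pkt);      /* feeds one whole packet to the decoder */
    if (ret < 0 && ret != AVERROR(EAGAIN))
        return ret;

    ret = avcodec_receive_frame(avctx, frame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;                                   /* no frame yet */
    return ret < 0 ? ret : 1;
}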
@@ -1380,7 +1379,7 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
if (ret == AVERROR(EAGAIN))
ret = 0;
else if (ret < 0) {
// Debug(3, "codec/audio: audio decode error: %1 (%2)\n",av_make_error_string(error, sizeof(error), ret),got_frame);
return ret;
} else
ret = avpkt->size;

@@ -1401,7 +1400,7 @@ int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *fram
samples = (char *)samples + data_size;
}
}
-//Debug(3,"data_size %d nb_samples %d sample_fmt %d channels %d planar %d\n",data_size,frame->nb_samples,avctx->sample_fmt,avctx->channels,planar);
+// Debug(3,"data_size %d nb_samples %d sample_fmt %d channels %d planar %d\n",data_size,frame->nb_samples,avctx->sample_fmt,avctx->channels,planar);
*frame_size_ptr = data_size * avctx->channels * frame->nb_samples;
} else {
*frame_size_ptr = 0;
codec.h | 50

@@ -60,10 +60,10 @@ struct _video_decoder_
int GetFormatDone; ///< flag get format called!
AVCodec *VideoCodec; ///< video codec
AVCodecContext *VideoCtx; ///< video codec context
-//#ifdef FFMPEG_WORKAROUND_ARTIFACTS
+// #ifdef FFMPEG_WORKAROUND_ARTIFACTS
int FirstKeyFrame; ///< flag first frame
-//#endif
+// #endif
// AVFrame *Frame; ///< decoded video frame

int filter; // flag for deint filter

@@ -94,82 +94,82 @@ struct _video_decoder_
// Typedefs
//----------------------------------------------------------------------------

/// Video decoder typedef.
typedef struct _video_decoder_ VideoDecoder;

/// Audio decoder typedef.
typedef struct _audio_decoder_ AudioDecoder;

//----------------------------------------------------------------------------
// Variables
//----------------------------------------------------------------------------

/// x11 display name
extern const char *X11DisplayName;

/// HW device context from video module
extern AVBufferRef *HwDeviceContext;

//----------------------------------------------------------------------------
// Variables
//----------------------------------------------------------------------------

/// Flag prefer fast xhannel switch
extern char CodecUsePossibleDefectFrames;

//----------------------------------------------------------------------------
// Prototypes
//----------------------------------------------------------------------------

/// Allocate a new video decoder context.
extern VideoDecoder *CodecVideoNewDecoder(VideoHwDecoder *);

/// Deallocate a video decoder context.
extern void CodecVideoDelDecoder(VideoDecoder *);

/// Open video codec.
extern void CodecVideoOpen(VideoDecoder *, int);

/// Close video codec.
extern void CodecVideoClose(VideoDecoder *);

/// Decode a video packet.
extern void CodecVideoDecode(VideoDecoder *, const AVPacket *);

/// Flush video buffers.
extern void CodecVideoFlushBuffers(VideoDecoder *);

/// Allocate a new audio decoder context.
extern AudioDecoder *CodecAudioNewDecoder(void);

/// Deallocate an audio decoder context.
extern void CodecAudioDelDecoder(AudioDecoder *);

/// Open audio codec.
extern void CodecAudioOpen(AudioDecoder *, int);

/// Close audio codec.
extern void CodecAudioClose(AudioDecoder *);

/// Set audio drift correction.
extern void CodecSetAudioDrift(int);

/// Set audio pass-through.
extern void CodecSetAudioPassthrough(int);

/// Set audio downmix.
extern void CodecSetAudioDownmix(int);

/// Decode an audio packet.
extern void CodecAudioDecode(AudioDecoder *, const AVPacket *);

/// Flush audio buffers.
extern void CodecAudioFlushBuffers(AudioDecoder *);

/// Setup and initialize codec module.
extern void CodecInit(void);

/// Cleanup and exit codec module.
extern void CodecExit(void);

/// @}
common.h | 2

@@ -86,7 +86,7 @@ void mpgl_load_functions2(GL * gl, void *(*get_fn)(void *ctx, const char *n), vo

typedef void (GLAPIENTRY * MP_GLDEBUGPROC) (GLenum, GLenum, GLuint, GLenum, GLsizei, const GLchar *, const void *);

-//function pointers loaded from the OpenGL library
+// function pointers loaded from the OpenGL library
struct GL
{
int version; // MPGL_VER() mangled (e.g. 210 for 2.1)
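The idea behind struct GL and mpgl_load_functions2() is a table of function pointers filled by name through a caller-supplied resolver. A toy sketch of that pattern (the struct, member and helper names are made up for illustration; this is not the plugin's actual struct):

typedef void (*gl_flush_fn)(void);

struct gl_mini {
    int version;
    gl_flush_fn Flush;
};

/* get_fn resolves an entry point by name, e.g. via glXGetProcAddress or eglGetProcAddress */
static void load_mini(struct gl_mini *gl, void *(*get_fn)(void *ctx, const char *n), void *ctx)
{
    gl->Flush = (gl_flush_fn)get_fn(ctx, "glFlush");
}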
|
292
openglosd.cpp
292
openglosd.cpp
@ -23,7 +23,7 @@ extern "C" void OSD_release_context();
|
|||||||
****************************************************************************************/
|
****************************************************************************************/
|
||||||
|
|
||||||
#ifdef CUVID
|
#ifdef CUVID
|
||||||
const char *rectVertexShader =
|
const char *rectVertexShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -38,7 +38,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *rectFragmentShader =
|
const char *rectFragmentShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
\
|
\
|
||||||
in vec4 rectCol; \
|
in vec4 rectCol; \
|
||||||
@ -50,7 +50,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textureVertexShader =
|
const char *textureVertexShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -70,7 +70,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textureFragmentShader =
|
const char *textureFragmentShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
in vec2 TexCoords; \
|
in vec2 TexCoords; \
|
||||||
in vec4 alphaValue; \
|
in vec4 alphaValue; \
|
||||||
@ -84,7 +84,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textVertexShader =
|
const char *textVertexShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -104,7 +104,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textFragmentShader =
|
const char *textFragmentShader =
|
||||||
"#version 330 core \n\
|
"#version 330 core \n\
|
||||||
in vec2 TexCoords; \
|
in vec2 TexCoords; \
|
||||||
in vec4 textColor; \
|
in vec4 textColor; \
|
||||||
@ -122,7 +122,7 @@ void main() \
|
|||||||
|
|
||||||
#else
|
#else
|
||||||
|
|
||||||
const char *rectVertexShader =
|
const char *rectVertexShader =
|
||||||
"\n \
|
"\n \
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -137,7 +137,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *rectFragmentShader =
|
const char *rectFragmentShader =
|
||||||
"\n \
|
"\n \
|
||||||
\
|
\
|
||||||
precision mediump float; \
|
precision mediump float; \
|
||||||
@ -150,7 +150,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textureVertexShader =
|
const char *textureVertexShader =
|
||||||
"\n \
|
"\n \
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -170,7 +170,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textureFragmentShader =
|
const char *textureFragmentShader =
|
||||||
"\n \
|
"\n \
|
||||||
precision mediump float; \
|
precision mediump float; \
|
||||||
in vec2 TexCoords; \
|
in vec2 TexCoords; \
|
||||||
@ -185,7 +185,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textVertexShader =
|
const char *textVertexShader =
|
||||||
"\n \
|
"\n \
|
||||||
\
|
\
|
||||||
layout (location = 0) in vec2 position; \
|
layout (location = 0) in vec2 position; \
|
||||||
@ -205,7 +205,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
|
|
||||||
const char *textFragmentShader =
|
const char *textFragmentShader =
|
||||||
"\n \
|
"\n \
|
||||||
precision mediump float; \
|
precision mediump float; \
|
||||||
in vec2 TexCoords; \
|
in vec2 TexCoords; \
|
||||||
@ -222,7 +222,7 @@ void main() \
|
|||||||
} \
|
} \
|
||||||
";
|
";
|
||||||
#endif
|
#endif
|
||||||
static cShader *Shaders[stCount];
|
static cShader *Shaders[stCount];
|
||||||
|
|
||||||
void cShader::Use(void) {
|
void cShader::Use(void) {
|
||||||
glUseProgram(id);
|
glUseProgram(id);
|
||||||
@ -294,14 +294,14 @@ bool cShader::Compile(const char *vertexCode, const char *fragmentCode) {
|
|||||||
sVertex = glCreateShader(GL_VERTEX_SHADER);
|
sVertex = glCreateShader(GL_VERTEX_SHADER);
|
||||||
glShaderSource(sVertex, 1, &vertexCode, NULL);
|
glShaderSource(sVertex, 1, &vertexCode, NULL);
|
||||||
glCompileShader(sVertex);
|
glCompileShader(sVertex);
|
||||||
// esyslog("[softhddev]:SHADER:VERTEX %s\n",vertexCode);
|
// esyslog("[softhddev]:SHADER:VERTEX %s\n",vertexCode);
|
||||||
if (!CheckCompileErrors(sVertex))
|
if (!CheckCompileErrors(sVertex))
|
||||||
return false;
|
return false;
|
||||||
// Fragment Shader
|
// Fragment Shader
|
||||||
sFragment = glCreateShader(GL_FRAGMENT_SHADER);
|
sFragment = glCreateShader(GL_FRAGMENT_SHADER);
|
||||||
glShaderSource(sFragment, 1, &fragmentCode, NULL);
|
glShaderSource(sFragment, 1, &fragmentCode, NULL);
|
||||||
glCompileShader(sFragment);
|
glCompileShader(sFragment);
|
||||||
// esyslog("[softhddev]:SHADER:FRAGMENT %s\n",fragmentCode);
|
// esyslog("[softhddev]:SHADER:FRAGMENT %s\n",fragmentCode);
|
||||||
if (!CheckCompileErrors(sFragment))
|
if (!CheckCompileErrors(sFragment))
|
||||||
return false;
|
return false;
|
||||||
// link Program
|
// link Program
|
||||||
@ -375,8 +375,8 @@ void cOglGlyph::BindTexture(void) {
|
|||||||
void cOglGlyph::LoadTexture(FT_BitmapGlyph ftGlyph) {
|
void cOglGlyph::LoadTexture(FT_BitmapGlyph ftGlyph) {
|
||||||
// Disable byte-alignment restriction
|
// Disable byte-alignment restriction
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
OSD_release_context();
|
OSD_release_context();
|
||||||
OSD_get_shared_context();
|
OSD_get_shared_context();
|
||||||
#endif
|
#endif
|
||||||
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
||||||
glGenTextures(1, &texture);
|
glGenTextures(1, &texture);
|
||||||
@ -400,10 +400,10 @@ void cOglGlyph::LoadTexture(FT_BitmapGlyph ftGlyph) {
|
|||||||
glBindTexture(GL_TEXTURE_2D, 0);
|
glBindTexture(GL_TEXTURE_2D, 0);
|
||||||
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
|
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
OSD_release_context();
|
OSD_release_context();
|
||||||
OSD_get_context();
|
OSD_get_context();
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -445,7 +445,7 @@ cOglFont *cOglFont::Get(const char *name, int charHeight) {
|
|||||||
fonts->Add(font);
|
fonts->Add(font);
|
||||||
return font;
|
return font;
|
||||||
}
|
}
|
||||||
|
|
||||||
void cOglFont::Init(void) {
|
void cOglFont::Init(void) {
|
||||||
fonts = new cList<cOglFont>;
|
fonts = new cList<cOglFont>;
|
||||||
if (FT_Init_FreeType(&ftLib))
|
if (FT_Init_FreeType(&ftLib))
|
||||||
@ -489,7 +489,7 @@ cOglGlyph* cOglFont::Glyph(uint charCode) const {
|
|||||||
error = FT_Stroker_New( ftLib, &stroker );
|
error = FT_Stroker_New( ftLib, &stroker );
|
||||||
if (error) {
|
if (error) {
|
||||||
esyslog("[softhddev]FT_Stroker_New FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
esyslog("[softhddev]FT_Stroker_New FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
float outlineWidth = 0.25f;
|
float outlineWidth = 0.25f;
|
||||||
FT_Stroker_Set(stroker,
|
FT_Stroker_Set(stroker,
|
||||||
@ -498,11 +498,11 @@ cOglGlyph* cOglFont::Glyph(uint charCode) const {
|
|||||||
FT_STROKER_LINEJOIN_ROUND,
|
FT_STROKER_LINEJOIN_ROUND,
|
||||||
0);
|
0);
|
||||||
|
|
||||||
|
|
||||||
error = FT_Get_Glyph(face->glyph, &ftGlyph);
|
error = FT_Get_Glyph(face->glyph, &ftGlyph);
|
||||||
if (error) {
|
if (error) {
|
||||||
esyslog("[softhddev]FT_Get_Glyph FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
esyslog("[softhddev]FT_Get_Glyph FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
error = FT_Glyph_StrokeBorder( &ftGlyph, stroker, 0, 1 );
|
error = FT_Glyph_StrokeBorder( &ftGlyph, stroker, 0, 1 );
|
||||||
@ -515,9 +515,9 @@ cOglGlyph* cOglFont::Glyph(uint charCode) const {
|
|||||||
error = FT_Glyph_To_Bitmap( &ftGlyph, FT_RENDER_MODE_NORMAL, 0, 1);
|
error = FT_Glyph_To_Bitmap( &ftGlyph, FT_RENDER_MODE_NORMAL, 0, 1);
|
||||||
if (error) {
|
if (error) {
|
||||||
esyslog("[softhddev]FT_Glyph_To_Bitmap FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
esyslog("[softhddev]FT_Glyph_To_Bitmap FT_Error (0x%02x) : %s\n", FT_Errors[error].code, FT_Errors[error].message);
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
cOglGlyph *Glyph = new cOglGlyph(charCode, (FT_BitmapGlyph)ftGlyph);
|
cOglGlyph *Glyph = new cOglGlyph(charCode, (FT_BitmapGlyph)ftGlyph);
|
||||||
glyphCache.Add(Glyph);
|
glyphCache.Add(Glyph);
|
||||||
FT_Done_Glyph(ftGlyph);
|
FT_Done_Glyph(ftGlyph);
|
||||||
@ -559,18 +559,18 @@ cOglFb::cOglFb(GLint width, GLint height, GLint viewPortWidth, GLint viewPortHei
|
|||||||
}
|
}
|
||||||
|
|
||||||
cOglFb::~cOglFb(void) {
|
cOglFb::~cOglFb(void) {
|
||||||
if (texture)
|
if (texture)
|
||||||
glDeleteTextures(1, &texture);
|
glDeleteTextures(1, &texture);
|
||||||
if (fb)
|
if (fb)
|
||||||
glDeleteFramebuffers(1, &fb);
|
glDeleteFramebuffers(1, &fb);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
bool cOglFb::Init(void) {
|
bool cOglFb::Init(void) {
|
||||||
initiated = true;
|
initiated = true;
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
OSD_release_context();
|
OSD_release_context();
|
||||||
OSD_get_shared_context();
|
OSD_get_shared_context();
|
||||||
#endif
|
#endif
|
||||||
glGenTextures(1, &texture);
|
glGenTextures(1, &texture);
|
||||||
glBindTexture(GL_TEXTURE_2D, texture);
|
glBindTexture(GL_TEXTURE_2D, texture);
|
||||||
@ -582,20 +582,20 @@ bool cOglFb::Init(void) {
|
|||||||
|
|
||||||
glGenFramebuffers(1, &fb);
|
glGenFramebuffers(1, &fb);
|
||||||
glBindFramebuffer(GL_FRAMEBUFFER, fb);
|
glBindFramebuffer(GL_FRAMEBUFFER, fb);
|
||||||
|
|
||||||
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
|
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
|
||||||
|
|
||||||
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
|
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
|
||||||
esyslog("[softhddev]ERROR: %d Framebuffer is not complete!\n",__LINE__);
|
esyslog("[softhddev]ERROR: %d Framebuffer is not complete!\n",__LINE__);
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
OSD_release_context();
|
OSD_release_context();
|
||||||
OSD_get_context();
|
OSD_get_context();
|
||||||
#endif
|
#endif
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
OSD_release_context();
|
OSD_release_context();
|
||||||
OSD_get_context();
|
OSD_get_context();
|
||||||
#endif
|
#endif
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@ -616,7 +616,7 @@ void cOglFb::BindWrite(void) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void cOglFb::Unbind(void) {
|
void cOglFb::Unbind(void) {
|
||||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||||
glBindTexture(GL_TEXTURE_2D, 0);
|
glBindTexture(GL_TEXTURE_2D, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -636,29 +636,29 @@ void cOglFb::Blit(GLint destX1, GLint destY1, GLint destX2, GLint destY2) {
|
|||||||
* cOglOutputFb
|
* cOglOutputFb
|
||||||
****************************************************************************************/
|
****************************************************************************************/
|
||||||
cOglOutputFb::cOglOutputFb(GLint width, GLint height) : cOglFb(width, height, width, height) {
|
cOglOutputFb::cOglOutputFb(GLint width, GLint height) : cOglFb(width, height, width, height) {
|
||||||
// surface = 0;
|
// surface = 0;
|
||||||
initiated = false;
|
initiated = false;
|
||||||
fb = 0;
|
fb = 0;
|
||||||
texture = 0;
|
texture = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
cOglOutputFb::~cOglOutputFb(void) {
|
cOglOutputFb::~cOglOutputFb(void) {
|
||||||
// glVDPAUUnregisterSurfaceNV(surface);
|
// glVDPAUUnregisterSurfaceNV(surface);
|
||||||
glDeleteTextures(1, &texture);
|
glDeleteTextures(1, &texture);
|
||||||
glDeleteFramebuffers(1, &fb);
|
glDeleteFramebuffers(1, &fb);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool cOglOutputFb::Init(void) {
|
bool cOglOutputFb::Init(void) {
|
||||||
initiated = true;
|
initiated = true;
|
||||||
|
|
||||||
glGenTextures(1, &texture);
|
glGenTextures(1, &texture);
|
||||||
glBindTexture(GL_TEXTURE_2D, texture);
|
glBindTexture(GL_TEXTURE_2D, texture);
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
|
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
|
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
|
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
|
||||||
|
|
||||||
glGenFramebuffers(1, &fb);
|
glGenFramebuffers(1, &fb);
|
||||||
glBindFramebuffer(GL_FRAMEBUFFER, fb);
|
glBindFramebuffer(GL_FRAMEBUFFER, fb);
|
||||||
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
|
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
|
||||||
@ -670,7 +670,7 @@ bool cOglOutputFb::Init(void) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void cOglOutputFb::BindWrite(void) {
|
void cOglOutputFb::BindWrite(void) {
|
||||||
if (!initiated)
|
if (!initiated)
|
||||||
Init();
|
Init();
|
||||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fb);
|
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fb);
|
||||||
}
|
}
|
||||||
@ -682,7 +682,7 @@ void cOglOutputFb::Unbind(void) {
|
|||||||
/****************************************************************************************
|
/****************************************************************************************
|
||||||
* cOglVb
|
* cOglVb
|
||||||
****************************************************************************************/
|
****************************************************************************************/
|
||||||
static cOglVb *VertexBuffers[vbCount];
|
static cOglVb *VertexBuffers[vbCount];
|
||||||
|
|
||||||
cOglVb::cOglVb(int type) {
|
cOglVb::cOglVb(int type) {
|
||||||
this->type = (eVertexBufferType)type;
|
this->type = (eVertexBufferType)type;
|
||||||
@ -700,35 +700,35 @@ cOglVb::~cOglVb(void) {
|
|||||||
bool cOglVb::Init(void) {
|
bool cOglVb::Init(void) {
|
||||||
|
|
||||||
if (type == vbTexture) {
|
if (type == vbTexture) {
|
||||||
//Texture VBO definition
|
// Texture VBO definition
|
||||||
sizeVertex1 = 2;
|
sizeVertex1 = 2;
|
||||||
sizeVertex2 = 2;
|
sizeVertex2 = 2;
|
||||||
numVertices = 6;
|
numVertices = 6;
|
||||||
drawMode = GL_TRIANGLES;
|
drawMode = GL_TRIANGLES;
|
||||||
shader = stTexture;
|
shader = stTexture;
|
||||||
} else if (type == vbRect) {
|
} else if (type == vbRect) {
|
||||||
//Rectangle VBO definition
|
// Rectangle VBO definition
|
||||||
sizeVertex1 = 2;
|
sizeVertex1 = 2;
|
||||||
sizeVertex2 = 0;
|
sizeVertex2 = 0;
|
||||||
numVertices = 4;
|
numVertices = 4;
|
||||||
drawMode = GL_TRIANGLE_FAN;
|
drawMode = GL_TRIANGLE_FAN;
|
||||||
shader = stRect;
|
shader = stRect;
|
||||||
} else if (type == vbEllipse) {
|
} else if (type == vbEllipse) {
|
||||||
//Ellipse VBO definition
|
// Ellipse VBO definition
|
||||||
sizeVertex1 = 2;
|
sizeVertex1 = 2;
|
||||||
sizeVertex2 = 0;
|
sizeVertex2 = 0;
|
||||||
numVertices = 182;
|
numVertices = 182;
|
||||||
drawMode = GL_TRIANGLE_FAN;
|
drawMode = GL_TRIANGLE_FAN;
|
||||||
shader = stRect;
|
shader = stRect;
|
||||||
} else if (type == vbSlope) {
|
} else if (type == vbSlope) {
|
||||||
//Slope VBO definition
|
// Slope VBO definition
|
||||||
sizeVertex1 = 2;
|
sizeVertex1 = 2;
|
||||||
sizeVertex2 = 0;
|
sizeVertex2 = 0;
|
||||||
numVertices = 102;
|
numVertices = 102;
|
||||||
drawMode = GL_TRIANGLE_FAN;
|
drawMode = GL_TRIANGLE_FAN;
|
||||||
shader = stRect;
|
shader = stRect;
|
||||||
} else if (type == vbText) {
|
} else if (type == vbText) {
|
||||||
//Text VBO definition
|
// Text VBO definition
|
||||||
sizeVertex1 = 2;
|
sizeVertex1 = 2;
|
||||||
sizeVertex2 = 2;
|
sizeVertex2 = 2;
|
||||||
numVertices = 6;
|
numVertices = 6;
|
||||||
@ -746,9 +746,9 @@ bool cOglVb::Init(void) {
|
|||||||
glVertexAttribPointer(0, sizeVertex1, GL_FLOAT, GL_FALSE, (sizeVertex1 + sizeVertex2) * sizeof(GLfloat), (GLvoid*)0);
|
glVertexAttribPointer(0, sizeVertex1, GL_FLOAT, GL_FALSE, (sizeVertex1 + sizeVertex2) * sizeof(GLfloat), (GLvoid*)0);
|
||||||
if (sizeVertex2 > 0) {
|
if (sizeVertex2 > 0) {
|
||||||
glEnableVertexAttribArray(1);
|
glEnableVertexAttribArray(1);
|
||||||
glVertexAttribPointer(1, sizeVertex2, GL_FLOAT, GL_FALSE, (sizeVertex1 + sizeVertex2) * sizeof(GLfloat), (GLvoid*)(sizeVertex1 * sizeof(GLfloat)));
|
glVertexAttribPointer(1, sizeVertex2, GL_FLOAT, GL_FALSE, (sizeVertex1 + sizeVertex2) * sizeof(GLfloat), (GLvoid*)(sizeVertex1 * sizeof(GLfloat)));
|
||||||
}
|
}
|
||||||
|
|
||||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||||
glBindVertexArray(0);
|
glBindVertexArray(0);
|
||||||
|
|
||||||
@ -803,7 +803,7 @@ void cOglVb::DrawArrays(int count) {
|
|||||||
if (count == 0)
|
if (count == 0)
|
||||||
count = numVertices;
|
count = numVertices;
|
||||||
glDrawArrays(drawMode, 0, count);
|
glDrawArrays(drawMode, 0, count);
|
||||||
glFlush();
|
glFlush();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -839,8 +839,8 @@ cOglCmdDeleteFb::cOglCmdDeleteFb(cOglFb *fb) : cOglCmd(fb) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
bool cOglCmdDeleteFb::Execute(void) {
|
bool cOglCmdDeleteFb::Execute(void) {
|
||||||
if (fb)
|
if (fb)
|
||||||
delete fb;
|
delete fb;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -889,7 +889,7 @@ bool cOglCmdRenderFbToBufferFb::Execute(void) {
|
|||||||
buffer->Bind();
|
buffer->Bind();
|
||||||
if (!fb->BindTexture())
|
if (!fb->BindTexture())
|
||||||
return false;
|
return false;
|
||||||
VertexBuffers[vbTexture]->Bind();
|
VertexBuffers[vbTexture]->Bind();
|
||||||
VertexBuffers[vbTexture]->SetVertexData(quadVertices);
|
VertexBuffers[vbTexture]->SetVertexData(quadVertices);
|
||||||
VertexBuffers[vbTexture]->DrawArrays();
|
VertexBuffers[vbTexture]->DrawArrays();
|
||||||
VertexBuffers[vbTexture]->Unbind();
|
VertexBuffers[vbTexture]->Unbind();
|
||||||
@ -906,33 +906,33 @@ cOglCmdCopyBufferToOutputFb::cOglCmdCopyBufferToOutputFb(cOglFb *fb, cOglOutputF
|
|||||||
}
|
}
|
||||||
|
|
||||||
#ifdef PLACEBO
|
#ifdef PLACEBO
|
||||||
//extern "C" {
|
//extern "C" {
|
||||||
extern unsigned char *posd;
|
extern unsigned char *posd;
|
||||||
//}
|
//}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
bool cOglCmdCopyBufferToOutputFb::Execute(void) {
|
bool cOglCmdCopyBufferToOutputFb::Execute(void) {
|
||||||
int i;
|
int i;
|
||||||
pthread_mutex_lock(&OSDMutex);
|
pthread_mutex_lock(&OSDMutex);
|
||||||
fb->BindRead();
|
fb->BindRead();
|
||||||
oFb->BindWrite();
|
oFb->BindWrite();
|
||||||
glClear(GL_COLOR_BUFFER_BIT);
|
glClear(GL_COLOR_BUFFER_BIT);
|
||||||
|
|
||||||
#ifdef PLACEBO
|
#ifdef PLACEBO
|
||||||
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
|
||||||
glPixelStorei(GL_PACK_ALIGNMENT, 1);
|
glPixelStorei(GL_PACK_ALIGNMENT, 1);
|
||||||
if (posd)
|
if (posd)
|
||||||
glReadPixels(0, 0 ,fb->Width(), fb->Height(),GL_BGRA,GL_UNSIGNED_BYTE,posd);
|
glReadPixels(0, 0 ,fb->Width(), fb->Height(),GL_BGRA,GL_UNSIGNED_BYTE,posd);
|
||||||
#else
|
#else
|
||||||
fb->Blit(x, y + fb->Height(), x + fb->Width(), y);
|
fb->Blit(x, y + fb->Height(), x + fb->Width(), y);
|
||||||
glFlush();
|
glFlush();
|
||||||
#endif
|
#endif
|
||||||
ActivateOsd(oFb->texture,x, y, fb->Width() ,fb->Height());
|
ActivateOsd(oFb->texture,x, y, fb->Width() ,fb->Height());
|
||||||
|
|
||||||
oFb->Unbind();
|
oFb->Unbind();
|
||||||
pthread_mutex_unlock(&OSDMutex);
|
pthread_mutex_unlock(&OSDMutex);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@ -946,7 +946,7 @@ bool cOglCmdFill::Execute(void) {
|
|||||||
glm::vec4 col;
|
glm::vec4 col;
|
||||||
ConvertColor(color, col);
|
ConvertColor(color, col);
|
||||||
fb->Bind();
|
fb->Bind();
|
||||||
glClearColor(col.r, col.g, col.b, col.a);
|
glClearColor(col.r, col.g, col.b, col.a);
|
||||||
glClear(GL_COLOR_BUFFER_BIT);
|
glClear(GL_COLOR_BUFFER_BIT);
|
||||||
fb->Unbind();
|
fb->Unbind();
|
||||||
return true;
|
return true;
|
||||||
@@ -977,7 +977,7 @@ bool cOglCmdDrawRectangle::Execute(void) {
 VertexBuffers[vbRect]->ActivateShader();
 VertexBuffers[vbRect]->SetShaderColor(color);
 VertexBuffers[vbRect]->SetShaderProjectionMatrix(fb->Width(), fb->Height());

 fb->Bind();
 VertexBuffers[vbRect]->DisableBlending();
 VertexBuffers[vbRect]->Bind();
@@ -1027,7 +1027,7 @@ bool cOglCmdDrawEllipse::Execute(void) {
 VertexBuffers[vbEllipse]->SetShaderColor(color);
 VertexBuffers[vbEllipse]->SetShaderProjectionMatrix(fb->Width(), fb->Height());

-//not antialiased
+// not antialiased
 fb->Bind();
 VertexBuffers[vbEllipse]->DisableBlending();
 VertexBuffers[vbEllipse]->Bind();
@@ -1111,7 +1111,7 @@ GLfloat *cOglCmdDrawEllipse::CreateVerticesQuadrant(int &numVertices) {
 vertices[0] = x + width;
 vertices[1] = y + height;
 startAngle = 270;
 break;
 default:
 break;
 }
@@ -1129,7 +1129,7 @@ GLfloat *cOglCmdDrawEllipse::CreateVerticesHalf(int &numVertices) {
 GLfloat radiusY = 0.0f;
 GLint transX = 0;
 GLint transY = 0;
 GLint startAngle = 0;
 GLfloat *vertices = new GLfloat[size];
 switch (quadrants) {
 case 5:
@@ -1245,7 +1245,7 @@ bool cOglCmdDrawSlope::Execute(void) {
 VertexBuffers[vbSlope]->SetShaderColor(color);
 VertexBuffers[vbSlope]->SetShaderProjectionMatrix(fb->Width(), fb->Height());

-//not antialiased
+// not antialiased
 fb->Bind();
 VertexBuffers[vbSlope]->DisableBlending();
 VertexBuffers[vbSlope]->Bind();
@@ -1260,7 +1260,7 @@ bool cOglCmdDrawSlope::Execute(void) {
 }

 //------------------ cOglCmdDrawText --------------------
 cOglCmdDrawText::cOglCmdDrawText( cOglFb *fb, GLint x, GLint y, unsigned int *symbols, GLint limitX,
 const char *name, int fontSize, tColor colorText) : cOglCmd(fb), fontName(name) {
 this->x = x;
 this->y = y;
@@ -1318,9 +1318,9 @@ bool cOglCmdDrawText::Execute(void) {

 x1, y2, 0.0, 1.0, // left bottom
 x2, y1, 1.0, 0.0, // right top
 x2, y2, 1.0, 1.0 // right bottom
 };

 g->BindTexture();
 VertexBuffers[vbText]->SetVertexData(vertices);
 VertexBuffers[vbText]->DrawArrays();
@@ -1356,8 +1356,8 @@ cOglCmdDrawImage::~cOglCmdDrawImage(void) {
 bool cOglCmdDrawImage::Execute(void) {
 GLuint texture;
 #ifdef VAAPI
 OSD_release_context();
 OSD_get_shared_context();
 #endif
 glGenTextures(1, &texture);
 glBindTexture(GL_TEXTURE_2D, texture);
@@ -1378,10 +1378,10 @@ bool cOglCmdDrawImage::Execute(void) {
 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
 glBindTexture(GL_TEXTURE_2D, 0);
 #ifdef VAAPI
 OSD_release_context();
 OSD_get_context();
 #endif

 GLfloat x1 = x; //left
 GLfloat y1 = y; //top
 GLfloat x2 = x + width; //right
@@ -1394,7 +1394,7 @@ bool cOglCmdDrawImage::Execute(void) {

 x1, y2, 0.0, 1.0, // left bottom
 x2, y1, 1.0, 0.0, // right top
 x2, y2, 1.0, 1.0 // right bottom
 };

 VertexBuffers[vbTexture]->ActivateShader();
@@ -1462,7 +1462,7 @@ bool cOglCmdDrawTexture::Execute(void) {
 //------------------ cOglCmdStoreImage --------------------
 cOglCmdStoreImage::cOglCmdStoreImage(sOglImage *imageRef, tColor *argb) : cOglCmd(NULL) {
 this->imageRef = imageRef;
 data = argb;
 }

 cOglCmdStoreImage::~cOglCmdStoreImage(void) {
@@ -1471,8 +1471,8 @@ cOglCmdStoreImage::~cOglCmdStoreImage(void) {

 bool cOglCmdStoreImage::Execute(void) {
 #ifdef VAAPI
 OSD_release_context();
 OSD_get_shared_context();
 #endif
 glGenTextures(1, &imageRef->texture);
 glBindTexture(GL_TEXTURE_2D, imageRef->texture);
@@ -1493,8 +1493,8 @@ bool cOglCmdStoreImage::Execute(void) {
 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
 glBindTexture(GL_TEXTURE_2D, 0);
 #ifdef VAAPI
 OSD_release_context();
 OSD_get_context();
 #endif
 return true;
 }
@@ -1524,7 +1524,7 @@ cOglThread::cOglThread(cCondWait *startWait, int maxCacheSize) : cThread("oglThr
 maxTextureSize = 0;
 for (int i = 0; i < OGL_MAX_OSDIMAGES; i++) {
 imageCache[i].used = false;
 imageCache[i].texture = GL_NONE;
 imageCache[i].width = 0;
 imageCache[i].height = 0;
 }
@@ -1550,7 +1550,7 @@ void cOglThread::Stop(void) {
 void cOglThread::DoCmd(cOglCmd* cmd) {
 while (stalled)
 cCondWait::SleepMs(10);

 bool doSignal = false;
 Lock();
 if (commands.size() == 0)
@@ -1584,7 +1584,7 @@ int cOglThread::StoreImage(const cImage &image) {
 esyslog("[softhddev]Maximum size for GPU cache reached. Used: %.2fMB Max: %.2fMB", cachedMB, maxMB);
 return 0;
 }

 int slot = GetFreeSlot();
 if (!slot)
 return 0;
@@ -1635,7 +1635,7 @@ void cOglThread::ClearSlot(int slot) {
 int i = -slot - 1;
 if (i >= 0 && i < OGL_MAX_OSDIMAGES) {
 Lock();
 imageCache[i].used = false;
 imageCache[i].texture = GL_NONE;
 imageCache[i].width = 0;
 imageCache[i].height = 0;
@@ -1671,7 +1671,7 @@ void cOglThread::Action(void) {
 return;
 }
 dsyslog("[softhddev]OpenGL Context initialized");

 if (!InitShaders()) {
 esyslog("[softhddev]Could not initiate Shaders");
 Cleanup();
@@ -1679,7 +1679,7 @@ void cOglThread::Action(void) {
 return;
 }
 dsyslog("[softhddev]Shaders initialized");

 if (!InitVdpauInterop()) {
 esyslog("[softhddev]: vdpau interop NOT initialized");
 Cleanup();
@@ -1702,7 +1702,7 @@ void cOglThread::Action(void) {
 //now Thread is ready to do his job
 startWait->Signal();
 stalled = false;

 while(Running()) {

 if (commands.empty()) {
@@ -1714,14 +1714,14 @@ void cOglThread::Action(void) {
 cOglCmd* cmd = commands.front();
 commands.pop();
 Unlock();
-//uint64_t start = cTimeMs::Now();
+// uint64_t start = cTimeMs::Now();
 cmd->Execute();
-//esyslog("[softhddev]\"%s\", %dms, %d commands left, time %" PRIu64 "", cmd->Description(), (int)(cTimeMs::Now() - start), commands.size(), cTimeMs::Now());
+// esyslog("[softhddev]\"%s\", %dms, %d commands left, time %" PRIu64 "", cmd->Description(), (int)(cTimeMs::Now() - start), commands.size(), cTimeMs::Now());
 delete cmd;
 if (stalled && commands.size() < OGL_CMDQUEUE_SIZE / 2)
 stalled = false;
 }

 dsyslog("[softhddev]Cleaning up OpenGL stuff");
 Cleanup();
 dsyslog("[softhddev]OpenGL Worker Thread Ended");
@@ -1733,7 +1733,7 @@ extern "C" int GlxInitopengl();


 bool cOglThread::InitOpenGL(void) {


 #ifdef PLACEBO
 const char *displayName = X11DisplayName;
@@ -1762,7 +1762,7 @@ bool cOglThread::InitOpenGL(void) {
 free(buffer[0]);
 free(buffer[1]);
 free(buffer[2]);

 GLenum err = glewInit();
 if( err != GLEW_OK) {
 esyslog("[softhddev]glewInit failed, aborting\n");
@@ -1770,8 +1770,8 @@ bool cOglThread::InitOpenGL(void) {
 }
 #else

 if (!GlxInitopengl())
 return false;
 #endif
 VertexBuffers[vbText]->EnableBlending();
 glDisable(GL_DEPTH_TEST);
@@ -1800,7 +1800,7 @@ bool cOglThread::InitVdpauInterop(void) {
 void *procAdress = GetVDPAUProcAdress();
 while (glGetError() != GL_NO_ERROR);
 glVDPAUInitNV(vdpDevice, procAdress);
 if (glGetError() != GL_NO_ERROR)
 return false;
 #endif
 return true;
@@ -1823,21 +1823,21 @@ void cOglThread::DeleteVertexBuffers(void) {
 }

 void cOglThread::Cleanup(void) {
 esyslog("[softhddev]OglThread cleanup\n");
 pthread_mutex_lock(&OSDMutex);
 OsdClose();

 DeleteVertexBuffers();
 delete cOglOsd::oFb;

 cOglOsd::oFb = NULL;
 DeleteShaders();
 // glVDPAUFiniNV();
 cOglFont::Cleanup();
 #ifdef PLACEBO
 glutExit();
 #endif
 pthread_mutex_unlock(&OSDMutex);
 }

 /****************************************************************************************
@@ -1849,7 +1849,7 @@ cOglPixmap::cOglPixmap(std::shared_ptr<cOglThread> oglThread, int Layer, const c
 int width = DrawPort.IsEmpty() ? ViewPort.Width() : DrawPort.Width();
 int height = DrawPort.IsEmpty() ? ViewPort.Height() : DrawPort.Height();
 fb = new cOglFb(width, height, ViewPort.Width(), ViewPort.Height());
 dirty = true;
 }

 cOglPixmap::~cOglPixmap(void) {
@@ -1922,7 +1922,7 @@ void cOglPixmap::DrawImage(const cPoint &Point, int ImageHandle) {
 oglThread->DoCmd(new cOglCmdDrawTexture(fb, img, Point.X(), Point.Y()));
 }
 /*
 Fallback to VDR implementation, needs to separate cSoftOsdProvider from softhddevice.cpp
 else {
 if (cSoftOsdProvider::GetImageData(ImageHandle))
 DrawImage(Point, *cSoftOsdProvider::GetImageData(ImageHandle));
@@ -1935,7 +1935,7 @@ void cOglPixmap::DrawImage(const cPoint &Point, int ImageHandle) {
 void cOglPixmap::DrawPixel(const cPoint &Point, tColor Color) {
 cRect r(Point.X(), Point.Y(), 1, 1);
 oglThread->DoCmd(new cOglCmdDrawRectangle(fb, r.X(), r.Y(), r.Width(), r.Height(), Color));

 SetDirty();
 MarkDrawPortDirty(r);
 }
@@ -1984,7 +1984,7 @@ void cOglPixmap::DrawText(const cPoint &Point, const char *s, tColor ColorFg, tC
 int cw = Width ? Width : w;
 int ch = Height ? Height : h;
 cRect r(x, y, cw, ch);

 if (ColorBg != clrTransparent)
 oglThread->DoCmd(new cOglCmdDrawRectangle(fb, r.X(), r.Y(), r.Width(), r.Height(), ColorBg));

@@ -2077,40 +2077,40 @@ cOglOsd::cOglOsd(int Left, int Top, uint Level, std::shared_ptr<cOglThread> oglT
 isSubtitleOsd = false;
 int osdWidth = 0;
 int osdHeight = 0;

 pthread_mutex_lock(&OSDMutex);
 VideoGetOsdSize(&osdWidth, &osdHeight);
 // osdWidth = 1920;
 // osdHeight = 1080;

 dsyslog("[softhddev]cOglOsd osdLeft %d osdTop %d screenWidth %d screenHeight %d", Left, Top, osdWidth, osdHeight);
 #ifdef PLACEBO
 if (posd)
 free(posd);
 posd = MALLOC(unsigned char, osdWidth * osdHeight * 4);
 #endif
-//create output framebuffer
+// create output framebuffer
 #ifdef VAAPI
 OSD_release_context();
 OSD_get_shared_context();
 #endif
 if (!oFb) {
 oFb = new cOglOutputFb(osdWidth, osdHeight);
 oglThread->DoCmd(new cOglCmdInitOutputFb(oFb));
 }
 #ifdef VAAPI
 OSD_release_context();
 #endif
 pthread_mutex_unlock(&OSDMutex);
 }

 cOglOsd::~cOglOsd() {
 OsdClose();
 SetActive(false);
 #ifdef PLACEBO
 if (posd)
 free(posd);
 posd = 0;
 #endif
 oglThread->DoCmd(new cOglCmdDeleteFb(bFb));
 }
@@ -2124,9 +2124,9 @@ eOsdError cOglOsd::SetAreas(const tArea *Areas, int NumAreas) {

 tArea area = { r.Left(), r.Top(), r.Right(), r.Bottom(), 32 };

-//now we know the actuaL osd size, create double buffer frame buffer
+// now we know the actuaL osd size, create double buffer frame buffer
 if (bFb) {
 oglThread->DoCmd(new cOglCmdDeleteFb(bFb));
 DestroyPixmap(oglPixmaps[0]);
 }
 bFb = new cOglFb(r.Width(), r.Height(), r.Width(), r.Height());
@@ -2145,7 +2145,7 @@ cPixmap *cOglOsd::CreatePixmap(int Layer, const cRect &ViewPort, const cRect &Dr
 int height = DrawPort.IsEmpty() ? ViewPort.Height() : DrawPort.Height();

 if (width > oglThread->MaxTextureSize() || height > oglThread->MaxTextureSize()) {
 esyslog("[softhddev] cannot allocate pixmap of %dpx x %dpx, clipped to %dpx x %dpx!",
 width, height, std::min(width, oglThread->MaxTextureSize()), std::min(height, oglThread->MaxTextureSize()));
 width = std::min(width, oglThread->MaxTextureSize());
 height = std::min(height, oglThread->MaxTextureSize());
@@ -2154,11 +2154,11 @@ cPixmap *cOglOsd::CreatePixmap(int Layer, const cRect &ViewPort, const cRect &Dr
 cOglPixmap *p = new cOglPixmap(oglThread, Layer, ViewPort, DrawPort);

 if (cOsd::AddPixmap(p)) {
-//find free slot
+// find free slot
 for (int i = 0; i < oglPixmaps.Size(); i++)
 if (!oglPixmaps[i])
 return oglPixmaps[i] = p;
-//append at end
+// append at end
 oglPixmaps.Append(p);
 return p;
 }
@@ -2190,28 +2190,28 @@ void cOglOsd::Flush(void) {
 if (!oglThread->Active())
 return;
 LOCK_PIXMAPS;
-//check if any pixmap is dirty
+// check if any pixmap is dirty
 bool dirty = false;
 for (int i = 0; i < oglPixmaps.Size() && !dirty; i++)
 if (oglPixmaps[i] && oglPixmaps[i]->Layer() >= 0 && oglPixmaps[i]->IsDirty())
 dirty = true;
 if (!dirty)
 return;
-//clear buffer
+// clear buffer
-//uint64_t start = cTimeMs::Now();
+// uint64_t start = cTimeMs::Now();
-//dsyslog("[softhddev]Start Flush at %" PRIu64 "", cTimeMs::Now());
+// dsyslog("[softhddev]Start Flush at %" PRIu64 "", cTimeMs::Now());


 oglThread->DoCmd(new cOglCmdFill(bFb, clrTransparent));

-//render pixmap textures blended to buffer
+// render pixmap textures blended to buffer
 for (int layer = 0; layer < MAXPIXMAPLAYERS; layer++) {
 for (int i = 0; i < oglPixmaps.Size(); i++) {
 if (oglPixmaps[i]) {
 if (oglPixmaps[i]->Layer() == layer) {
 oglThread->DoCmd(new cOglCmdRenderFbToBufferFb( oglPixmaps[i]->Fb(),
 bFb,
 oglPixmaps[i]->ViewPort().X(),
 (!isSubtitleOsd) ? oglPixmaps[i]->ViewPort().Y() : 0,
 oglPixmaps[i]->Alpha(),
 oglPixmaps[i]->DrawPort().X(),
@@ -2223,7 +2223,7 @@ void cOglOsd::Flush(void) {
 }
 oglThread->DoCmd(new cOglCmdCopyBufferToOutputFb(bFb, oFb, Left(), Top()));

-//dsyslog("[softhddev]End Flush at %" PRIu64 ", duration %d", cTimeMs::Now(), (int)(cTimeMs::Now()-start));
+// dsyslog("[softhddev]End Flush at %" PRIu64 ", duration %d", cTimeMs::Now(), (int)(cTimeMs::Now()-start));
 }

 void cOglOsd::DrawScaledBitmap(int x, int y, const cBitmap &Bitmap, double FactorX, double FactorY, bool AntiAlias) {
@@ -202,8 +202,8 @@ class cOglFb
 {
 protected:
 bool initiated;
 // GLuint fb;
 // GLuint texture;
 GLint width, height;
 GLint viewPortWidth, viewPortHeight;
 bool scrollable;
@@ -268,7 +268,7 @@ class cOglOutputFb:public cOglFb

 /****************************************************************************************
 * cOglVb
 * Vertex Buffer - OpenGl Vertices for the different drawing commands
 ****************************************************************************************/
 enum eVertexBufferType
 {
@@ -33,7 +33,7 @@
 #include "iatomic.h"
 #include "ringbuffer.h"

 /// ring buffer structure
 struct _ring_buffer_
 {
 char *Buffer; ///< ring buffer data
ringbuffer.h (24 changes):
@@ -23,40 +23,40 @@
 /// @addtogroup Ringbuffer
 /// @{

 /// ring buffer typedef
 typedef struct _ring_buffer_ RingBuffer;

 /// reset ring buffer pointers
 extern void RingBufferReset(RingBuffer *);

 /// create new ring buffer
 extern RingBuffer *RingBufferNew(size_t);

 /// free ring buffer
 extern void RingBufferDel(RingBuffer *);

 /// write into ring buffer
 extern size_t RingBufferWrite(RingBuffer *, const void *, size_t);

 /// get write pointer of ring buffer
 extern size_t RingBufferGetWritePointer(RingBuffer *, void **);

 /// advance write pointer of ring buffer
 extern size_t RingBufferWriteAdvance(RingBuffer *, size_t);

 /// read from ring buffer
 extern size_t RingBufferRead(RingBuffer *, void *, size_t);

 /// get read pointer of ring buffer
 extern size_t RingBufferGetReadPointer(RingBuffer *, const void **);

 /// advance read pointer of ring buffer
 extern size_t RingBufferReadAdvance(RingBuffer *, size_t);

 /// free bytes ring buffer
 extern size_t RingBufferFreeBytes(RingBuffer *);

 /// used bytes ring buffer
 extern size_t RingBufferUsedBytes(RingBuffer *);

 /// @}
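For reference, the ringbuffer.h interface above is used roughly as in the following sketch. This is a minimal illustration only; it assumes, from the names, that RingBufferNew() takes the buffer size in bytes and that the read/write calls return the number of bytes actually transferred.

    #include "ringbuffer.h"

    void ringbuffer_example(void)
    {
        RingBuffer *rb = RingBufferNew(512 * 1024);     // 512 KB buffer
        char packet[188];                               // one TS packet
        char out[188];

        // producer: only write when a whole packet fits
        if (RingBufferFreeBytes(rb) >= sizeof(packet)) {
            RingBufferWrite(rb, packet, sizeof(packet));
        }

        // consumer: drain whatever is currently buffered
        while (RingBufferUsedBytes(rb) >= sizeof(out)) {
            RingBufferRead(rb, out, sizeof(out));
        }

        RingBufferDel(rb);
    }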
@@ -1,4 +1,3 @@
-
 // shader
 #ifdef CUVID
 char vertex_osd[] = { "\
@@ -89,4 +89,3 @@ color.a = 1.0;
 // color mapping
 out_color = color;
 }
-
softhdcuvid.cpp (560 changes): diff not shown, too large.

softhddev.c (56 changes):
@@ -90,7 +90,7 @@ static enum AVCodecID AudioCodecID; ///< current codec id
 static int AudioChannelID; ///< current audio channel id
 static VideoStream *AudioSyncStream; ///< video stream for audio/video sync

 /// Minimum free space in audio buffer 8 packets for 8 channels
 #define AUDIO_MIN_BUFFER_FREE (3072 * 8 * 8)
 #define AUDIO_BUFFER_SIZE (512 * 1024) ///< audio PES buffer default size
 static AVPacket AudioAvPkt[1]; ///< audio a/v packet
@@ -884,9 +884,9 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size, int is_st
 // Transport stream demux
 //////////////////////////////////////////////////////////////////////////////

 /// Transport stream packet size
 #define TS_PACKET_SIZE 188
 /// Transport stream packet sync byte
 #define TS_PACKET_SYNC 0x47

 ///
@@ -962,11 +962,11 @@ static int TsDemuxer(TsDemux * tsdx, const uint8_t * data, int size)
 #if 0
 int tmp;

 // check continuity
 tmp = p[3] & 0x0F; // continuity counter
 if (((tsdx->CC + 1) & 0x0F) != tmp) {
 Debug(3, "tsdemux: OUT OF SYNC: %d %d\n", tmp, tsdx->CC);
-//TS discontinuity (received 8, expected 0) for PID
+// TS discontinuity (received 8, expected 0) for PID
 }
 tsdx->CC = tmp;
 #endif
@@ -1049,7 +1049,7 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
 AudioAvPkt->pts =
 (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
 0xFE) >> 1;
-//Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
+// Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
 }
 if (0) { // dts is unused
 if (data[7] & 0x40) {
@@ -1256,7 +1256,7 @@ int PlayTsAudio(const uint8_t * data, int size)
 Debug(3, "AudioDelay %dms\n", AudioDelay);
 usleep(AudioDelay * 1000);
 AudioDelay = 0;
 // TsDemuxer(tsdx, data, size); // insert dummy audio

 }
 return TsDemuxer(tsdx, data, size);
@@ -1333,7 +1333,7 @@ static VideoStream PipVideoStream[1]; ///< pip video stream
 uint32_t VideoSwitch; ///< debug video switch ticks
 static int VideoMaxPacketSize; ///< biggest used packet buffer
 #endif
-//#define STILL_DEBUG 2
+// #define STILL_DEBUG 2
 #ifdef STILL_DEBUG
 static char InStillPicture; ///< flag still picture
 #endif
@@ -1394,7 +1394,7 @@ static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const v
 {
 AVPacket *avpkt;

 // Debug(3, "video: enqueue %d\n", size);

 avpkt = &stream->PacketRb[stream->PacketWrite];
 if (!avpkt->stream_index) { // add pts only for first added
@@ -1404,8 +1404,8 @@ static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const v

 if (avpkt->stream_index + size >= avpkt->size) {

 // Warning(_("video: packet buffer too small for %d\n"),
 // avpkt->stream_index + size);

 // new + grow reserves FF_INPUT_BUFFER_PADDING_SIZE
 av_grow_packet(avpkt, ((size + VIDEO_BUFFER_SIZE / 2)
@@ -1479,7 +1479,7 @@ static void VideoNextPacket(VideoStream * stream, int codec_id)
 memset(avpkt->data + avpkt->stream_index, 0, AV_INPUT_BUFFER_PADDING_SIZE);

 stream->CodecIDRb[stream->PacketWrite] = codec_id;
-//DumpH264(avpkt->data, avpkt->stream_index);
+// DumpH264(avpkt->data, avpkt->stream_index);

 // advance packet write
 stream->PacketWrite = (stream->PacketWrite + 1) % VIDEO_PACKET_MAX;
@@ -1618,7 +1618,7 @@ static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts, con
 continue;
 }
 if (!p[0] && !p[1] && p[2] == 0x01 && p[3] == 0xb3) {
 // printf("aspectratio %02x\n",p[7]>>4);
 }
 --n;
 ++p;
@@ -1868,7 +1868,7 @@ int VideoDecodeInput(VideoStream * stream)
 }

 filled = atomic_read(&stream->PacketsFilled);
 // printf("Packets in Decode %d\n",filled);
 if (!filled) {
 return -1;
 }
@@ -1906,7 +1906,7 @@ int VideoDecodeInput(VideoStream * stream)
 Debug(3, "in VideoDecode make close\n");
 stream->LastCodecID = AV_CODEC_ID_NONE;
 CodecVideoClose(stream->Decoder);
 // FIXME: CodecVideoClose calls/uses hw decoder
 goto skip;
 }
 // FIXME: look if more close are in the queue
@@ -1941,8 +1941,8 @@ int VideoDecodeInput(VideoStream * stream)
 avpkt->stream_index = 0;

 #ifdef USE_PIP
-//fprintf(stderr, "[");
+// fprintf(stderr, "[");
-//DumpMpeg(avpkt->data, avpkt->size);
+// DumpMpeg(avpkt->data, avpkt->size);
 #ifdef STILL_DEBUG
 if (InStillPicture) {
 DumpMpeg(avpkt->data, avpkt->size);
@@ -1954,7 +1954,7 @@ int VideoDecodeInput(VideoStream * stream)
 CodecVideoDecode(stream->Decoder, avpkt);
 }
 pthread_mutex_unlock(&stream->DecoderLockMutex);
-//fprintf(stderr, "]\n");
+// fprintf(stderr, "]\n");
 #else
 // old version
 if (stream->LastCodecID == AV_CODEC_ID_MPEG2VIDEO) {
@@ -2193,7 +2193,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
 }
 // hard limit buffer full: needed for replay
 if (atomic_read(&stream->PacketsFilled) >= VIDEO_PACKET_MAX - 10) {
 // Debug(3, "video: video buffer full\n");
 return 0;
 }
 #ifdef USE_SOFTLIMIT
@@ -2225,7 +2225,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
 z = 0;
 while (!*check) { // count leading zeros
 if (l < 3) {
 // Warning(_("[softhddev] empty video packet %d bytes\n"), size);
 z = 0;
 break;
 }
@@ -2278,7 +2278,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
 VideoEnqueue(stream, pts, dts, check - 2, l + 2);
 return size;
 }
 // HEVC Codec
 if ((data[6] & 0xC0) == 0x80 && z >= 2 && check[0] == 0x01 && check[1] == 0x46) {
 // old PES HDTV recording z == 2 -> stronger check!
 if (stream->CodecID == AV_CODEC_ID_HEVC) {
@@ -2372,7 +2372,7 @@ int PlayVideo(const uint8_t * data, int size)
 return PlayVideo3(MyVideoStream, data, size);
 }

 /// call VDR support function
 extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);

 #if defined(USE_JPEG) && JPEG_LIB_VERSION >= 80
@@ -3033,7 +3033,7 @@ static void StartXServer(void)
 int maxfd;
 int fd;

 // X server
 if (X11Server) {
 args[0] = X11Server;
 } else {
@@ -3047,7 +3047,7 @@ static void StartXServer(void)
 // export display for childs
 setenv("DISPLAY", X11DisplayName, 1);
 }
 // split X server arguments string into words
 if ((sval = X11ServerArguments)) {
 char *s;

@@ -3071,13 +3071,13 @@ static void StartXServer(void)
 // FIXME: append VTxx
 args[argn] = NULL;

 // arm the signal
 memset(&usr1, 0, sizeof(struct sigaction));
 usr1.sa_handler = Usr1Handler;
 sigaction(SIGUSR1, &usr1, NULL);

 Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0], X11ServerArguments);
 // fork
 if ((pid = fork())) { // parent

 X11ServerPid = pid;
@@ -3096,7 +3096,7 @@ static void StartXServer(void)
 close(fd); // vdr should open with O_CLOEXEC
 }

 // start the X server
 execvp(args[0], (char *const *)args);

 Error(_("x-setup: Failed to start X server '%s'\n"), args[0]);
@@ -3269,7 +3269,7 @@ void MainThreadHook(void)
 // Suspend/Resume
 //////////////////////////////////////////////////////////////////////////////

 /// call VDR support function
 extern void DelPip(void);

 /**
video.c (369 changes):
@@ -44,10 +44,10 @@
 #define noUSE_SCREENSAVER ///< support disable screensaver

 #define USE_GRAB ///< experimental grab code
-//#define USE_GLX ///< outdated GLX code
+// #define USE_GLX ///< outdated GLX code
 #define USE_DOUBLEBUFFER ///< use GLX double buffers
 #define USE_CUVID ///< enable cuvid support
-//#define AV_INFO ///< log a/v sync informations
+// #define AV_INFO ///< log a/v sync informations
 #ifndef AV_INFO_TIME
 #define AV_INFO_TIME (50 * 60) ///< a/v info every minute
 #endif
@@ -86,7 +86,7 @@
 #include <time.h>
 #include <signal.h>
 #ifndef HAVE_PTHREAD_NAME
 /// only available with newer glibc
 #define pthread_setname_np(thread, name)
 #endif
 #endif
@@ -104,12 +104,12 @@
 #include <xcb/dpms.h>
 #endif

-//#include <xcb/shm.h>
+// #include <xcb/shm.h>
-//#include <xcb/xv.h>
+// #include <xcb/xv.h>

-//#include <xcb/xcb_image.h>
+// #include <xcb/xcb_image.h>
-//#include <xcb/xcb_event.h>
+// #include <xcb/xcb_event.h>
-//#include <xcb/xcb_atom.h>
+// #include <xcb/xcb_atom.h>
 #include <xcb/xcb_icccm.h>
 #ifdef XCB_ICCCM_NUM_WM_SIZE_HINTS_ELEMENTS
 #include <xcb/xcb_ewmh.h>
@@ -132,10 +132,10 @@ typedef enum

 #ifdef USE_GLX
 #include <GL/glew.h>
-//#include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
+// #include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
-//#include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
+// #include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
-//#include <GL/glxew.h>
+// #include <GL/glxew.h>
-//#include <GL/glx.h>
+// #include <GL/glx.h>
 // only for gluErrorString
 #include <GL/glu.h>
 #include <GL/glut.h>
@@ -146,8 +146,8 @@ typedef enum
 #include <libavutil/pixdesc.h>

 #ifdef CUVID
-//#include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
+// #include <GL/gl.h> // For GL_COLOR_BUFFER_BIT
-//#include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
+// #include <GL/glext.h> // For GL_COLOR_BUFFER_BIT
 #include <cuda.h>
 #include <cuda_runtime_api.h>
 #include <cudaGL.h>
@@ -168,7 +168,7 @@ typedef enum
 #endif

 #include <assert.h>
-//#define EGL_EGLEXT_PROTOTYPES
+// #define EGL_EGLEXT_PROTOTYPES
 #include <EGL/egl.h>
 #include <EGL/eglext.h>
 #ifndef GL_OES_EGL_image
@@ -332,8 +332,8 @@ typedef struct

 #define CODEC_SURFACES_MAX 12 //

 #define VIDEO_SURFACES_MAX 6 ///< video output surfaces for queue
-//#define OUTPUT_SURFACES_MAX 4 ///< output surfaces for flip page
+// #define OUTPUT_SURFACES_MAX 4 ///< output surfaces for flip page
 #ifdef VAAPI
 #define PIXEL_FORMAT AV_PIX_FMT_VAAPI
 #define SWAP_BUFFER_SIZE 3
@@ -367,53 +367,53 @@ static unsigned VideoWindowHeight; ///< video output window height

 static const VideoModule NoopModule; ///< forward definition of noop module

 /// selected video module
 static const VideoModule *VideoUsedModule = &NoopModule;

 signed char VideoHardwareDecoder = -1; ///< flag use hardware decoder

 static char VideoSurfaceModesChanged; ///< flag surface modes changed

 /// flag use transparent OSD.
 static const char VideoTransparentOsd = 1;

 static uint32_t VideoBackground; ///< video background color
 static char VideoStudioLevels; ///< flag use studio levels

 /// Default deinterlace mode.
 static VideoDeinterlaceModes VideoDeinterlace[VideoResolutionMax];

 /// Default number of deinterlace surfaces
 static const int VideoDeinterlaceSurfaces = 4;

 /// Default skip chroma deinterlace flag (CUVID only).
 static char VideoSkipChromaDeinterlace[VideoResolutionMax];

 /// Default inverse telecine flag (CUVID only).
 static char VideoInverseTelecine[VideoResolutionMax];

 /// Default amount of noise reduction algorithm to apply (0 .. 1000).
 static int VideoDenoise[VideoResolutionMax];

 /// Default amount of sharpening, or blurring, to apply (-1000 .. 1000).
 static int VideoSharpen[VideoResolutionMax];

 /// Default cut top and bottom in pixels
 static int VideoCutTopBottom[VideoResolutionMax];

 /// Default cut left and right in pixels
 static int VideoCutLeftRight[VideoResolutionMax];

 /// Default scaling mode
 static VideoScalingModes VideoScaling[VideoResolutionMax];

 /// Default audio/video delay
 int VideoAudioDelay;

 /// Default zoom mode for 4:3
 static VideoZoomModes Video4to3ZoomMode;

 /// Default zoom mode for 16:9 and others
 static VideoZoomModes VideoOtherZoomMode;

 static char Video60HzMode; ///< handle 60hz displays
@@ -452,9 +452,9 @@ pthread_mutex_t OSDMutex; ///< OSD update mutex

 static pthread_t VideoDisplayThread; ///< video display thread

-//static pthread_cond_t VideoDisplayWakeupCond; ///< wakeup condition variable
+// static pthread_cond_t VideoDisplayWakeupCond; ///< wakeup condition variable
-//static pthread_mutex_t VideoDisplayMutex; ///< video condition mutex
+// static pthread_mutex_t VideoDisplayMutex; ///< video condition mutex
-//static pthread_mutex_t VideoDisplayLockMutex; ///< video lock mutex
+// static pthread_mutex_t VideoDisplayLockMutex; ///< video lock mutex

 static int OsdConfigWidth; ///< osd configured width
 static int OsdConfigHeight; ///< osd configured height
@@ -546,21 +546,21 @@ static void VideoSetPts(int64_t * pts_p, int interlaced, const AVCodecContext *
 // FIXME: using framerate as workaround for av_frame_get_pkt_duration
 //

 // if (video_ctx->framerate.num && video_ctx->framerate.den) {
 // duration = 1000 * video_ctx->framerate.den / video_ctx->framerate.num;
 // } else {
 duration = interlaced ? 40 : 20; // 50Hz -> 20ms default
 // }
 // Debug(4, "video: %d/%d %" PRIx64 " -> %d\n", video_ctx->framerate.den, video_ctx->framerate.num, av_frame_get_pkt_duration(frame), duration);

 // update video clock
 if (*pts_p != (int64_t) AV_NOPTS_VALUE) {
 *pts_p += duration * 90;
 //Info("video: %s +pts\n", Timestamp2String(*pts_p));
 }
-//av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
+// av_opt_ptr(avcodec_get_frame_class(), frame, "best_effort_timestamp");
-//pts = frame->best_effort_timestamp;
+// pts = frame->best_effort_timestamp;
 // pts = frame->pkt_pts;
 pts = frame->pts;
 if (pts == (int64_t) AV_NOPTS_VALUE || !pts) {
 // libav: 0.8pre didn't set pts
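The `duration * 90` in the hunk above is the usual millisecond-to-PTS conversion, since MPEG PTS ticks at 90 kHz. A small worked example of what the two lines compute (comments added for illustration):

    duration = interlaced ? 40 : 20;   // ms per displayed picture on a 50 Hz stream
    *pts_p += duration * 90;           // 90 ticks/ms: 20 ms -> 1800 ticks, 40 ms -> 3600 ticks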
@@ -672,7 +672,6 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
 (video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
 *output_height =
 (video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
-// JOJO
 if (*output_width > video_width) {
 *output_width = video_width;
 *output_y += (video_height - *output_height) / 2;
@@ -744,25 +743,25 @@ static uint64_t test_time = 0;
 #define VideoThreadLock(void)\
 {\
 if (VideoThread) {\
 if (pthread_mutex_lock(&VideoLockMutex)) {\
 Error(_("video: can't lock thread\n"));\
 }\
 }\
 }
 // test_time = GetusTicks();
 // printf("Lock start....");
 ///
 /// Unlock video thread.
 ///
 #define VideoThreadUnlock(void)\
 {\
 if (VideoThread) {\
 if (pthread_mutex_unlock(&VideoLockMutex)) {\
 Error(_("video: can't unlock thread\n"));\
 }\
 }\
 }
 // printf("Video Locked for %d\n",(GetusTicks()-test_time)/1000);

 //----------------------------------------------------------------------------
 // GLX
@@ -791,7 +790,7 @@ static PFNGLXSWAPINTERVALSGIPROC GlxSwapIntervalSGI;
 GLenum err;\
 \
 if ((err = glGetError()) != GL_NO_ERROR) {\
 Debug(3, "video/glx: error %s:%d %d '%s'\n",__FILE__,__LINE__, err, gluErrorString(err));\
 }\
 }

@@ -840,7 +839,7 @@ char *eglErrorString(EGLint error)
 EGLint err;\
 \
 if ((err = eglGetError()) != EGL_SUCCESS) {\
 Debug(3, "video/egl: %s:%d error %d %s\n", __FILE__,__LINE__,err,eglErrorString(err));\
 }\
 }

@@ -849,19 +848,19 @@ char *eglErrorString(EGLint error)
 void OSD_get_shared_context()
 {
 eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
 // EglCheck();
 }

 void OSD_get_context()
 {
 eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, OSDcontext);
 // EglCheck();
 }

 void OSD_release_context()
 {
 eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
 // EglCheck();
 }

 #endif
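These three helpers are the context switches that the VAAPI paths in the OSD hunks above bracket their GL work with. The pattern, as it appears in the cOglCmdDrawImage::Execute() and cOglCmdStoreImage::Execute() hunks, is roughly:

    #ifdef VAAPI
        OSD_release_context();      // drop the context this thread currently holds
        OSD_get_shared_context();   // bind the context shared with the video output
    #endif

        glGenTextures(1, &texture); // ... the actual GL texture work ...
        glBindTexture(GL_TEXTURE_2D, 0);

    #ifdef VAAPI
        OSD_release_context();      // give the shared context back
        OSD_get_context();          // re-bind the OSD's own context
    #endif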
@ -968,7 +967,7 @@ static void EglInit(void)
|
|||||||
return;
|
return;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
//The desired 30-bit color visual
|
// The desired 30-bit color visual
|
||||||
int attributeList10[] = {
|
int attributeList10[] = {
|
||||||
GLX_DRAWABLE_TYPE, GLX_WINDOW_BIT,
|
GLX_DRAWABLE_TYPE, GLX_WINDOW_BIT,
|
||||||
GLX_RENDER_TYPE, GLX_RGBA_BIT,
|
GLX_RENDER_TYPE, GLX_RGBA_BIT,
|
||||||
@ -1186,10 +1185,10 @@ static void EglExit(void)
|
|||||||
// must destroy contet
|
// must destroy contet
|
||||||
#ifdef CUVID
|
#ifdef CUVID
|
||||||
// must destroy glx
|
// must destroy glx
|
||||||
// if (glXGetCurrentContext() == glxContext) {
|
// if (glXGetCurrentContext() == glxContext) {
|
||||||
// if currently used, set to none
|
// if currently used, set to none
|
||||||
glXMakeCurrent(XlibDisplay, None, NULL);
|
glXMakeCurrent(XlibDisplay, None, NULL);
|
||||||
// }
|
// }
|
||||||
if (OSDcontext) {
|
if (OSDcontext) {
|
||||||
glXDestroyContext(XlibDisplay, OSDcontext);
|
glXDestroyContext(XlibDisplay, OSDcontext);
|
||||||
GlxCheck();
|
GlxCheck();
|
||||||
@ -1342,10 +1341,10 @@ typedef struct _cuvid_decoder_
|
|||||||
int SurfacesFree[CODEC_SURFACES_MAX];
|
int SurfacesFree[CODEC_SURFACES_MAX];
|
||||||
/// video surface ring buffer
|
/// video surface ring buffer
|
||||||
int SurfacesRb[VIDEO_SURFACES_MAX];
|
int SurfacesRb[VIDEO_SURFACES_MAX];
|
||||||
// CUcontext cuda_ctx;
|
// CUcontext cuda_ctx;
|
||||||
|
|
||||||
// cudaStream_t stream; // make my own cuda stream
|
// cudaStream_t stream; // make my own cuda stream
|
||||||
// CUgraphicsResource cuResource;
|
// CUgraphicsResource cuResource;
|
||||||
int SurfaceWrite; ///< write pointer
|
int SurfaceWrite; ///< write pointer
|
||||||
int SurfaceRead; ///< read pointer
|
int SurfaceRead; ///< read pointer
|
||||||
atomic_t SurfacesFilled; ///< how many of the buffer is used
|
atomic_t SurfacesFilled; ///< how many of the buffer is used
|
||||||
@@ -1406,11 +1405,11 @@ typedef struct priv
struct pl_renderer *renderertest;
const struct pl_swapchain *swapchain;
struct pl_context_params context;
// struct pl_render_target r_target;
// struct pl_render_params r_params;
// struct pl_tex final_fbo;
VkSurfaceKHR pSurface;
// VkSemaphore sig_in;
int has_dma_buf;
} priv;
static priv *p;
@@ -1420,9 +1419,9 @@ static int semid;
struct itimerval itimer;
#endif

-GLuint vao_buffer; //
+GLuint vao_buffer;

-//GLuint vao_vao[4]; //
+//GLuint vao_vao[4];
GLuint gl_shader = 0, gl_prog = 0, gl_fbo = 0; // shader programm
GLint gl_colormatrix, gl_colormatrix_c;
GLuint OSDfb = 0;
@@ -1619,7 +1618,7 @@ static int CuvidGetVideoSurface0(CuvidDecoder * decoder)
int i;

if (!decoder->SurfaceFreeN) {
// Error(_("video/cuvid: out of surfaces\n"));
return -1;
}
// use oldest surface
@@ -1739,9 +1738,9 @@ static const struct mp_egl_config_attr mp_egl_attribs[] = {
};

const int mpgl_preferred_gl_versions[] = {
// 440,
// 430,
// 400,
330,
320,
310,
@@ -1923,7 +1922,7 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)

int i = 0;

// setenv ("DISPLAY", ":0", 0);

Debug(3, "Cuvid New HW Decoder\n");
if ((unsigned)CuvidDecoderN >= sizeof(CuvidDecoders) / sizeof(*CuvidDecoders)) {
@@ -1936,8 +1935,8 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)
}
#endif
#ifdef VAAPI
+// if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, ":0.0" , NULL, 0)) != 0 ) {
if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", NULL, 0)) != 0) {
-// if ((i = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, ":0.0" , NULL, 0)) != 0 ) {
Fatal("codec: can't allocate HW video codec context err %04x", i);
}
#endif
@@ -1952,8 +1951,8 @@ static CuvidDecoder *CuvidNewHwDecoder(VideoStream * stream)
decoder->VaDisplay = VaDisplay;
#endif
decoder->Window = VideoWindow;
-//decoder->VideoX = 0; // done by calloc
+// decoder->VideoX = 0; // done by calloc
-//decoder->VideoY = 0;
+// decoder->VideoY = 0;
decoder->VideoWidth = VideoWindowWidth;
decoder->VideoHeight = VideoWindowHeight;

@@ -2049,7 +2048,7 @@ static void CuvidDelHwDecoder(CuvidDecoder * decoder)
if (decoder == CuvidDecoders[0])
VideoThreadUnlock();

// glXMakeCurrent(XlibDisplay, None, NULL);
for (i = 0; i < CuvidDecoderN; ++i) {
if (CuvidDecoders[i] == decoder) {
CuvidDecoders[i] = NULL;
@@ -2057,7 +2056,7 @@ static void CuvidDelHwDecoder(CuvidDecoder * decoder)
if (i < --CuvidDecoderN) {
CuvidDecoders[i] = CuvidDecoders[CuvidDecoderN];
}
// CuvidCleanup(decoder);
CuvidPrintFrames(decoder);
#ifdef CUVID
if (decoder->cuda_ctx && CuvidDecoderN == 1) {
@@ -2143,7 +2142,7 @@ void generateCUDAImage(CuvidDecoder * decoder, int index, const AVFrame * frame,
{
int n;

-for (n = 0; n < 2; n++) { //
+for (n = 0; n < 2; n++) {
// widthInBytes must account for the chroma plane
// elements being two samples wide.
CUDA_MEMCPY2D cpy = {
@@ -2172,7 +2171,7 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
struct pl_image *img;
struct pl_plane *pl;

-//printf("Create textures and planes %d %d\n",size_x,size_y);
+// printf("Create textures and planes %d %d\n",size_x,size_y);
Debug(3, "video/vulkan: create %d Textures Format %s w %d h %d \n", anz,
PixFmt == AV_PIX_FMT_NV12 ? "NV12" : "P010", size_x, size_y);

@@ -2192,11 +2191,11 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
size = 2;
}
if (decoder->pl_images[i].planes[n].texture) {
-//#ifdef VAAPI
+// #ifdef VAAPI
if (decoder->pl_images[i].planes[n].texture->params.shared_mem.handle.fd) {
close(decoder->pl_images[i].planes[n].texture->params.shared_mem.handle.fd);
}
-//#endif
+// #endif
pl_tex_destroy(p->gpu, &decoder->pl_images[i].planes[n].texture); // delete old texture
}

@@ -2335,7 +2334,7 @@ void generateVAAPIImage(CuvidDecoder * decoder, int index, const AVFrame * frame
},
};

-//printf("vor create Object %d with fd %d import size %u offset %d %dx%d\n",id,fd,size,offset, tex_params.w,tex_params.h);
+// printf("vor create Object %d with fd %d import size %u offset %d %dx%d\n",id,fd,size,offset, tex_params.w,tex_params.h);

if (decoder->pl_images[index].planes[n].texture) {
pl_tex_destroy(p->gpu, &decoder->pl_images[index].planes[n].texture);
@@ -2409,21 +2408,21 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi

#ifdef VAAPI
#define MP_ARRAY_SIZE(s) (sizeof(s) / sizeof((s)[0]))
#define ADD_ATTRIB(name, value) \
do { \
assert(num_attribs + 3 < MP_ARRAY_SIZE(attribs)); \
attribs[num_attribs++] = (name); \
attribs[num_attribs++] = (value); \
attribs[num_attribs] = EGL_NONE; \
} while(0)

#define ADD_PLANE_ATTRIBS(plane) do { \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _FD_EXT, \
desc.objects[desc.layers[n].object_index[plane]].fd); \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _OFFSET_EXT, \
desc.layers[n].offset[plane]); \
ADD_ATTRIB(EGL_DMA_BUF_PLANE ## plane ## _PITCH_EXT, \
desc.layers[n].pitch[plane]); \
} while (0)

void generateVAAPIImage(CuvidDecoder * decoder, int index, const AVFrame * frame, int image_width, int image_height)
@@ -2528,12 +2527,12 @@ int push_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
}

-//printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
+// printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
/* pull filtered frames from the filtergraph */
while ((ret = av_buffersink_get_frame(decoder->buffersink_ctx, filt_frame)) >= 0) {
filt_frame->pts /= 2;
decoder->Interlaced = 0;
// printf("vaapideint video:new %#012" PRIx64 " old %#012" PRIx64 "\n",filt_frame->pts,frame->pts);
CuvidSyncRenderFrame(decoder, dec_ctx, filt_frame);
filt_frame = av_frame_alloc(); // get new frame

@@ -2592,7 +2591,7 @@ int init_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
src_params->frame_rate.den = 1;
src_params->sample_aspect_ratio = dec_ctx->sample_aspect_ratio;

-//printf("width %d height %d hw_frames_ctx %p\n",dec_ctx->width,dec_ctx->height ,frame->hw_frames_ctx);
+// printf("width %d height %d hw_frames_ctx %p\n",dec_ctx->width,dec_ctx->height ,frame->hw_frames_ctx);
ret = av_buffersrc_parameters_set(decoder->buffersrc_ctx, src_params);
if (ret < 0) {
Debug(3, "Cannot set hw_frames_ctx to src\n");
@@ -2714,7 +2713,7 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex

ist->GetFormatDone = 1;

Debug(3, "video: create decoder 16bit?=%d %dx%d old %d %d\n", bitformat16, video_ctx->width, video_ctx->height,
decoder->InputWidth, decoder->InputHeight);

if (*fmt_idx == PIXEL_FORMAT) { // HWACCEL used
@@ -2729,8 +2728,8 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex
ist->hwaccel_output_format = AV_PIX_FMT_NV12;
}

// if ((video_ctx->width != decoder->InputWidth
// || video_ctx->height != decoder->InputHeight) && decoder->TrickSpeed == 0) {

if (decoder->TrickSpeed == 0) {
#ifdef PLACEBO
@@ -2812,7 +2811,7 @@ int get_RGB(CuvidDecoder * decoder)

#ifndef PLACEBO

// eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
glGenTextures(1, &texture);
GlxCheck();
glBindTexture(GL_TEXTURE_2D, texture);
@@ -2860,7 +2859,7 @@ int get_RGB(CuvidDecoder * decoder)
GLint texLoc;

#ifdef CUVID
// glXMakeCurrent(XlibDisplay, VideoWindow, glxSharedContext);
GlxCheck();
#endif
glEnable(GL_BLEND);
@@ -2884,9 +2883,9 @@ int get_RGB(CuvidDecoder * decoder)
glUseProgram(0);
glActiveTexture(GL_TEXTURE0);
#ifdef CUVID
// glXMakeCurrent(XlibDisplay, VideoWindow, glxThreadContext);
#else
// eglMakeCurrent(eglDisplay, eglSurface,eglSurface, eglThreadContext);
#endif
}
glFlush();
@@ -2992,9 +2991,9 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
if (decoder == NULL) // no video aktiv
return NULL;

// surface = CuvidSurfacesRb[CuvidOutputSurfaceIndex];

// get real surface size
#ifdef PLACEBO
width = decoder->VideoWidth;
height = decoder->VideoHeight;
@@ -3003,7 +3002,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
height = decoder->InputHeight;
#endif

// Debug(3, "video/cuvid: grab %dx%d\n", width, height);

source_rect.x0 = 0;
source_rect.y0 = 0;
@@ -3036,7 +3035,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
}
}

// printf("video/cuvid: grab source dim %dx%d\n", width, height);

size = width * height * sizeof(uint32_t);

@@ -3057,7 +3056,7 @@ static uint8_t *CuvidGrabOutputSurfaceLocked(int *ret_size, int *ret_width, int
while (decoder->grab) {
usleep(1000); // wait for data
}
// Debug(3,"got grab data\n");

if (ret_size) {
*ret_size = size;
@@ -3120,7 +3119,7 @@ static void CuvidQueueVideoSurface(CuvidDecoder * decoder, int surface, int soft
return;
}
//
// Check and release, old surface
//
if ((old = decoder->SurfacesRb[decoder->SurfaceWrite]) != -1) {
// now we can release the surface, software surfaces only
@@ -3172,7 +3171,7 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
Debug(3, "video/vdpau: aspect ratio changed\n");

decoder->InputAspect = frame->sample_aspect_ratio;
-//printf("new aspect %d:%d\n",frame->sample_aspect_ratio.num,frame->sample_aspect_ratio.den);
+// printf("new aspect %d:%d\n",frame->sample_aspect_ratio.num,frame->sample_aspect_ratio.den);
CuvidUpdateOutput(decoder);
}

@@ -3185,9 +3184,9 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
//
if ( // decoder->PixFmt != video_ctx->pix_fmt
video_ctx->width != decoder->InputWidth
// || decoder->ColorSpace != color
|| video_ctx->height != decoder->InputHeight) {
-//Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->colorspace ,video_ctx->width, decoder->InputWidth,video_ctx->height, decoder->InputHeight);
+// Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->colorspace ,video_ctx->width, decoder->InputWidth,video_ctx->height, decoder->InputHeight);

decoder->InputWidth = video_ctx->width;
decoder->InputHeight = video_ctx->height;
@@ -3236,7 +3235,7 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
output = av_frame_alloc();
av_hwframe_transfer_data(output, frame, 0);
av_frame_copy_props(output, frame);
// printf("Save Surface ID %d %p %p\n",surface,decoder->pl_images[surface].planes[0].texture,decoder->pl_images[surface].planes[1].texture);
bool ok = pl_tex_upload(p->gpu, &(struct pl_tex_transfer_params) {
.tex = decoder->pl_images[surface].planes[0].texture,
.stride_w = output->linesize[0],
@@ -3287,7 +3286,7 @@ static void *CuvidGetHwAccelContext(CuvidDecoder * decoder)
unsigned int version;

Debug(3, "Initializing cuvid hwaccel thread ID:%ld\n", (long int)syscall(186));
-//turn NULL;
+// turn NULL;
#ifdef CUVID
if (decoder->cuda_ctx) {
Debug(3, "schon passiert\n");
@@ -3345,9 +3344,9 @@ static void CuvidAdvanceDecoderFrame(CuvidDecoder * decoder)
// keep use of last surface
++decoder->FramesDuped;
// FIXME: don't warn after stream start, don't warn during pause
// printf("video: display buffer empty, duping frame (%d/%d) %d\n",
// decoder->FramesDuped, decoder->FrameCounter,
// VideoGetBuffers(decoder->Stream));
return;
}

@@ -3470,10 +3469,10 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
case AVCOL_SPC_UNSPECIFIED: // comes with UHD
img->repr.sys = PL_COLOR_SYSTEM_BT_709;
memcpy(&img->color, &pl_color_space_bt709, sizeof(struct pl_color_space));
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_SCENE_709_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
break;

case AVCOL_SPC_BT2020_NCL:
@@ -3485,22 +3484,22 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
#ifdef VAAPI
render_params.peak_detect_params = NULL;
#endif
// img->color.primaries = PL_COLOR_PRIM_BT_2020;
// img->color.transfer = PL_COLOR_TRC_HLG;
// img->color.light = PL_COLOR_LIGHT_SCENE_HLG;
break;

default: // fallback
img->repr.sys = PL_COLOR_SYSTEM_BT_709;
memcpy(&img->color, &pl_color_space_bt709, sizeof(struct pl_color_space));
// img->color.primaries = PL_COLOR_PRIM_BT_709;
// img->color.transfer = PL_COLOR_TRC_BT_1886;
// img->color.light = PL_COLOR_LIGHT_DISPLAY;
break;
}
// Source crop
if (VideoScalerTest) { // right side defnied scaler
// pl_tex_clear(p->gpu,target->fbo,(float[4]){0}); // clear frame
img->src_rect.x0 = video_src_rect.x1 / 2 + 1;
img->src_rect.y0 = video_src_rect.y0;
img->src_rect.x1 = video_src_rect.x1;
@@ -3551,7 +3550,7 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
render_params.cone_params = NULL;
}

// render_params.upscaler = &pl_filter_ewa_lanczos;

render_params.upscaler = pl_named_filters[VideoScaling[decoder->Resolution]].filter;
render_params.downscaler = pl_named_filters[VideoScaling[decoder->Resolution]].filter;
@@ -3722,10 +3721,10 @@ static void CuvidDisplayFrame(void)
#if 1
diff = (GetusTicks() - last_time) / 1000;

// last_time = GetusTicks();
-//printf("Roundtrip Displayframe %d\n",diff);
+// printf("Roundtrip Displayframe %d\n",diff);
if (diff < 15000 && diff > 0) {
-//printf("Sleep %d\n",15000-diff);
+// printf("Sleep %d\n",15000-diff);
usleep((15000 - diff)); // * 1000);
}

@@ -3733,17 +3732,17 @@ static void CuvidDisplayFrame(void)
if (!p->swapchain)
return;

-//last_time = GetusTicks();
+// last_time = GetusTicks();

#ifdef CUVID
-//first_time = GetusTicks();
+// first_time = GetusTicks();
VideoThreadLock();
if (!first) {
// last_time = GetusTicks();
if (!pl_swapchain_submit_frame(p->swapchain))
Error(_("Failed to submit swapchain buffer\n"));
pl_swapchain_swap_buffers(p->swapchain); // swap buffers
// printf("submit and swap %d\n",(GetusTicks()-last_time)/1000000);
}

#endif
@@ -3782,9 +3781,9 @@ static void CuvidDisplayFrame(void)
else
target.repr.levels = PL_COLOR_LEVELS_TV;
target.repr.alpha = PL_ALPHA_UNKNOWN;
// target.repr.bits.sample_depth = 16;
// target.repr.bits.color_depth = 16;
// target.repr.bits.bit_shift =0;

switch (VulkanTargetColorSpace) {
case 0:
@@ -3877,7 +3876,7 @@ static void CuvidDisplayFrame(void)
GlxCheck();
#endif
#ifdef VAAPI
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, OSDcontext);
#endif
glEnable(GL_BLEND);
GlxCheck();
@@ -3905,21 +3904,21 @@ static void CuvidDisplayFrame(void)
glXMakeCurrent(XlibDisplay, VideoWindow, glxThreadContext);
#endif
#ifdef VAAPI
// eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglThreadContext);
#endif
}
#endif

#ifdef PLACEBO
#ifdef VAAPI
// first_time = GetusTicks();
if (!pl_swapchain_submit_frame(p->swapchain))
Fatal(_("Failed to submit swapchain buffer\n"));
pl_swapchain_swap_buffers(p->swapchain); // swap buffers
// printf("submit and swap %d us\n",(GetusTicks()-first_time)/1000);
#endif
VideoThreadUnlock();
// printf("Display time %d\n",(GetusTicks()-first_time)/1000000);
#else
#ifdef CUVID
glXGetVideoSyncSGI(&Count); // get current frame
@@ -4053,7 +4052,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
int err = 0;
static uint64_t last_time;

// video_clock = CuvidGetClock(decoder);
video_clock = decoder->PTS - (90 * 20 * 1); // 1 Frame in Output
filled = atomic_read(&decoder->SurfacesFilled);

@@ -4063,7 +4062,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
goto skip_sync;
}
audio_clock = AudioGetClock();
-//printf("Diff %d %ld %ld filled %d \n",(video_clock - audio_clock - VideoAudioDelay)/90,video_clock,audio_clock,filled);
+// printf("Diff %d %ld %ld filled %d \n",(video_clock - audio_clock - VideoAudioDelay)/90,video_clock,audio_clock,filled);
// 60Hz: repeat every 5th field
if (Video60HzMode && !(decoder->FramesDisplayed % 6)) {
if (audio_clock == (int64_t) AV_NOPTS_VALUE || video_clock == (int64_t) AV_NOPTS_VALUE) {
@@ -4105,19 +4104,19 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
diff = (decoder->LastAVDiff + diff) / 2;
decoder->LastAVDiff = diff;

// if (CuvidDecoderN) {
// CuvidDecoders[0]->Frameproc = (float)(diff / 90);
// }
#if 0
if (abs(diff / 90) > 0) {
printf(" Diff %d filled %d \n", diff / 90, filled);
}
#endif
if (abs(diff) > 5000 * 90) { // more than 5s
err = CuvidMessage(2, "video: audio/video difference too big\n");
// decoder->SyncCounter = 1;
// usleep(10);
// goto out;
} else if (diff > 100 * 90) {
// FIXME: this quicker sync step, did not work with new code!
err = CuvidMessage(4, "video: slow down video, duping frame %d\n", diff / 90);
@@ -4154,8 +4153,6 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)

skip_sync:
// check if next field is available
-//JOJO if (decoder->SurfaceField && filled <= 1 + 2 * decoder->Interlaced) {
-
if (decoder->SurfaceField && filled <= 1 + 2 * decoder->Interlaced) {
if (filled < 1 + 2 * decoder->Interlaced) {
++decoder->FramesDuped;
@@ -4171,8 +4168,8 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
}
#endif
}
// Debug(3,"filled zu klein %d Field %d Interlaced %d\n",filled,decoder->SurfaceField,decoder->Interlaced);
// goto out;
}

CuvidAdvanceDecoderFrame(decoder);
@@ -4252,8 +4249,8 @@ static void CuvidSyncRenderFrame(CuvidDecoder * decoder, const AVCodecContext *
}

// if (!decoder->Closing) {
// VideoSetPts(&decoder->PTS, decoder->Interlaced, video_ctx, frame);
// }
CuvidRenderFrame(decoder, video_ctx, frame);
}

@@ -4320,7 +4317,7 @@ static void CuvidDisplayHandlerThread(void)
// fill frame output ring buffer
//
filled = atomic_read(&decoder->SurfacesFilled);
//if (filled <= 1 + 2 * decoder->Interlaced) {
if (filled < 4) {
// FIXME: hot polling
// fetch+decode or reopen
@@ -4436,14 +4433,14 @@ static const VideoModule CuvidModule = {
.SetVideoMode = CuvidSetVideoMode,

.DisplayHandlerThread = CuvidDisplayHandlerThread,
// .OsdClear = GlxOsdClear,
// .OsdDrawARGB = GlxOsdDrawARGB,
// .OsdInit = GlxOsdInit,
// .OsdExit = GlxOsdExit,
// .OsdClear = CuvidOsdClear,
// .OsdDrawARGB = CuvidOsdDrawARGB,
// .OsdInit = CuvidOsdInit,
// .OsdExit = CuvidOsdExit,
.Exit = CuvidExit,
.Init = CuvidGlxInit,
};
@@ -4560,7 +4557,7 @@ static void NoopDisplayHandlerThread(void)

#else

#define NoopDisplayHandlerThread NULL

#endif

@@ -4630,7 +4627,7 @@ void VideoOsdClear(void)
OsdShown = 0;
#else
VideoThreadLock();
// VideoUsedModule->OsdClear();
OsdDirtyX = OsdWidth; // reset dirty area
OsdDirtyY = OsdHeight;
OsdDirtyWidth = 0;
@@ -4829,20 +4826,20 @@ static void VideoEvent(void)

case MapNotify:
Debug(3, "video/event: MapNotify\n");
-// �wm workaround
+// wm workaround
VideoThreadLock();
xcb_change_window_attributes(Connection, VideoWindow, XCB_CW_CURSOR, &VideoBlankCursor);
VideoThreadUnlock();
VideoBlankTick = 0;
break;
case Expose:
-//Debug(3, "video/event: Expose\n");
+// Debug(3, "video/event: Expose\n");
break;
case ReparentNotify:
Debug(3, "video/event: ReparentNotify\n");
break;
case ConfigureNotify:
-//Debug(3, "video/event: ConfigureNotify\n");
+// Debug(3, "video/event: ConfigureNotify\n");
VideoSetVideoMode(event.xconfigure.x, event.xconfigure.y, event.xconfigure.width, event.xconfigure.height);
break;
case ButtonPress:
@@ -4966,7 +4963,7 @@ void delete_placebo()

vkDestroySurfaceKHR(p->vk_inst->instance, p->pSurface, NULL);
pl_vk_inst_destroy(&p->vk_inst);
// pl_vulkan_destroy(&p->vk);
pl_context_destroy(&p->ctx);
free(p);
p = NULL;
@@ -4999,7 +4996,7 @@ void InitPlacebo()

// create Vulkan instance
memcpy(&iparams, &pl_vk_inst_default_params, sizeof(iparams));
// iparams.debug = true;
iparams.num_extensions = 2;
iparams.extensions = malloc(2 * sizeof(const char *));
*iparams.extensions = surfext;
@@ -5130,10 +5127,10 @@ static void *VideoHandlerThread(void *dummy)

VideoPollEvent();

// first_time = GetusTicks();
CuvidSyncDisplayFrame();

// printf("syncdisplayframe exec %d\n",(GetusTicks()-first_time)/1000);
}

pthread_cleanup_pop(NULL);
@@ -5290,9 +5287,9 @@ void VideoDelHwDecoder(VideoHwDecoder * hw_decoder)
}
#endif
// only called from inside the thread
-//VideoThreadLock();
+// VideoThreadLock();
VideoUsedModule->DelHwDecoder(hw_decoder);
-//VideoThreadUnlock();
+// VideoThreadUnlock();
}
}

@@ -5573,7 +5570,7 @@ uint8_t *VideoGrab(int *size, int *width, int *height, int write_header)
///
uint8_t *VideoGrabService(int *size, int *width, int *height)
{
// Debug(3, "video: grab service\n");

#ifdef USE_GRAB
if (VideoUsedModule->GrabOutput) {
@@ -5746,11 +5743,11 @@ static void X11DPMSReenable(xcb_connection_t * connection)

#else

/// dummy function: Suspend X11 screen saver.
#define X11SuspendScreenSaver(connection, suspend)
/// dummy function: Disable X11 DPMS.
#define X11DPMSDisable(connection)
/// dummy function: Reenable X11 DPMS.
#define X11DPMSReenable(connection)

#endif
@@ -6456,7 +6453,7 @@ void VideoInit(const char *display_name)
return;
}
// prefetch extensions
-//xcb_prefetch_extension_data(Connection, &xcb_big_requests_id);
+// xcb_prefetch_extension_data(Connection, &xcb_big_requests_id);
#ifdef xcb_USE_GLX
xcb_prefetch_extension_data(Connection, &xcb_glx_id);
#endif
@@ -6575,7 +6572,7 @@ void VideoExit(void)
//
// FIXME: cleanup.
//
-//RandrExit();
+// RandrExit();

//
// X11/xcb cleanup
128
video.h
@@ -30,10 +30,10 @@
// Typedefs
//----------------------------------------------------------------------------

/// Video hardware decoder typedef
typedef struct _video_hw_decoder_ VideoHwDecoder;

/// Video output stream typedef
typedef struct __video_stream__ VideoStream;

//----------------------------------------------------------------------------
@@ -49,179 +49,179 @@ extern char ConfigStartX11Server; ///< flag start the x11 server
// Prototypes
//----------------------------------------------------------------------------

/// Allocate new video hardware decoder.
extern VideoHwDecoder *VideoNewHwDecoder(VideoStream *);

/// Deallocate video hardware decoder.
extern void VideoDelHwDecoder(VideoHwDecoder *);

/// Get and allocate a video hardware surface.
extern unsigned VideoGetSurface(VideoHwDecoder *, const AVCodecContext *);

/// Release a video hardware surface
extern void VideoReleaseSurface(VideoHwDecoder *, unsigned);

/// Callback to negotiate the PixelFormat.
extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *, const enum AVPixelFormat *);

/// Render a ffmpeg frame.
extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *, const AVFrame *);

/// Get hwaccel context for ffmpeg.
extern void *VideoGetHwAccelContext(VideoHwDecoder *);

#ifdef AVCODEC_VDPAU_H
/// Draw vdpau render state.
extern void VideoDrawRenderState(VideoHwDecoder *, struct vdpau_render_state *);
#endif

#ifdef USE_OPENGLOSD
/// Set callback funktion to notify VDR about VideoEvents
extern void VideoSetVideoEventCallback(void (*)(void));
#endif

/// Poll video events.
extern void VideoPollEvent(void);

/// Wakeup display handler.
extern void VideoDisplayWakeup(void);

/// Set video device.
extern void VideoSetDevice(const char *);

/// Get video driver name.
extern const char *VideoGetDriverName(void);

/// Set video geometry.
extern int VideoSetGeometry(const char *);

/// Set 60Hz display mode.
extern void VideoSet60HzMode(int);

/// Set soft start audio/video sync.
extern void VideoSetSoftStartSync(int);

/// Set show black picture during channel switch.
extern void VideoSetBlackPicture(int);

/// Set brightness adjustment.
extern void VideoSetBrightness(int);

/// Set contrast adjustment.
extern void VideoSetContrast(int);

/// Set saturation adjustment.
extern void VideoSetSaturation(int);

-/// Set Gamm.
+/// Set Gamma.
extern void VideoSetGamma(int);

/// Set ColorSpace.
extern void VideoSetTargetColor(int);

/// Set hue adjustment.
extern void VideoSetHue(int);

/// Set Color Blindness.
extern void VideoSetColorBlindness(int);

/// Set Color Blindness Faktor
extern void VideoSetColorBlindnessFaktor(int);

/// Set video output position.
extern void VideoSetOutputPosition(VideoHwDecoder *, int, int, int, int);

/// Set video mode.
extern void VideoSetVideoMode(int, int, int, int);

/// Set 4:3 display format.
extern void VideoSet4to3DisplayFormat(int);

/// Set other display format.
extern void VideoSetOtherDisplayFormat(int);

/// Set video fullscreen mode.
extern void VideoSetFullscreen(int);

/// Set deinterlace.
extern void VideoSetDeinterlace(int[]);

/// Set skip chroma deinterlace.
extern void VideoSetSkipChromaDeinterlace(int[]);

/// Set inverse telecine.
extern void VideoSetInverseTelecine(int[]);

/// Set scaling.
extern void VideoSetScaling(int[]);

/// Set scaler test.
extern void VideoSetScalerTest(int);

/// Set denoise.
extern void VideoSetDenoise(int[]);

/// Set sharpen.
extern void VideoSetSharpen(int[]);

/// Set cut top and bottom.
extern void VideoSetCutTopBottom(int[]);

/// Set cut left and right.
extern void VideoSetCutLeftRight(int[]);

/// Set studio levels.
extern void VideoSetStudioLevels(int);

/// Set background.
extern void VideoSetBackground(uint32_t);

/// Set audio delay.
extern void VideoSetAudioDelay(int);

/// Set auto-crop parameters.
extern void VideoSetAutoCrop(int, int, int);

/// Clear OSD.
extern void VideoOsdClear(void);

/// Draw an OSD ARGB image.
extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);

/// Get OSD size.
extern void VideoGetOsdSize(int *, int *);

/// Set OSD size.
extern void VideoSetOsdSize(int, int);

/// Set Osd 3D Mode
extern void VideoSetOsd3DMode(int);

/// Set video clock.
extern void VideoSetClock(VideoHwDecoder *, int64_t);

/// Get video clock.
extern int64_t VideoGetClock(const VideoHwDecoder *);

/// Set closing flag.
extern void VideoSetClosing(VideoHwDecoder *);

/// Reset start of frame counter
extern void VideoResetStart(VideoHwDecoder *);

/// Set trick play speed.
extern void VideoSetTrickSpeed(VideoHwDecoder *, int);

/// Grab screen.
extern uint8_t *VideoGrab(int *, int *, int *, int);

/// Grab screen raw.
extern uint8_t *VideoGrabService(int *, int *, int *);

/// Get decoder statistics.
extern void VideoGetStats(VideoHwDecoder *, int *, int *, int *, int *, float *);

/// Get video stream size
extern void VideoGetVideoSize(VideoHwDecoder *, int *, int *, int *, int *);

extern void VideoOsdInit(void); ///< Setup osd.
@@ -230,19 +230,19 @@ extern void VideoOsdExit(void); ///< Cleanup osd.
extern void VideoInit(const char *); ///< Setup video module.
extern void VideoExit(void); ///< Cleanup and exit video module.

/// Poll video input buffers.
extern int VideoPollInput(VideoStream *);

/// Decode video input buffers.
extern int VideoDecodeInput(VideoStream *);

/// Get number of input buffers.
extern int VideoGetBuffers(const VideoStream *);

/// Set DPMS at Blackscreen switch
extern void SetDPMSatBlackScreen(int);

/// Raise the frontend window
extern int VideoRaiseWindow(void);

#ifdef USE_OPENGLOSD