Mirror of https://github.com/jojo61/vdr-plugin-softhdcuvid.git, synced 2023-10-10 13:37:41 +02:00
Merge pull request #45 from dnehring7/master
Reindent all sources to common coding. Reworked aspect function.
Commit 309ad1c90e
Makefile (4 changes)

@@ -385,9 +385,7 @@ HDRS= $(wildcard *.h)

 indent:
 for i in $(SRCS) $(HDRS); do \
-indent $$i; \
+VERSION_CONTROL=none indent $$i; \
-unexpand -a $$i | sed -e s/constconst/const/ > $$i.up; \
-mv $$i.up $$i; \
 done

 video_test: video.c Makefile
@@ -283,11 +283,13 @@ Setup: /etc/vdr/setup.conf
 0 pan and scan
 1 letter box
 2 center cut-out
+3 original

 softhddevice.VideoOtherDisplayFormat = 1
 0 pan and scan
 1 pillar box
 2 center cut-out
+3 original

 softhddevice.pip.X = 79
 softhddevice.pip.Y = 78
@@ -378,6 +380,3 @@ Running:
 Known Bugs:
 -----------
 SD Streams not working very well on vaapi
-
-
-
audio.c (9 changes)

@@ -146,7 +146,6 @@ static volatile char AudioRunning; ///< thread running / stopped
 static volatile char AudioPaused; ///< audio paused
 static volatile char AudioVideoIsReady; ///< video ready start early
 static int AudioSkip; ///< skip audio to sync to video
-int AudioDelay; /// delay audio to sync to video

 static const int AudioBytesProSample = 2; ///< number of bytes per sample

@@ -2024,7 +2023,8 @@ static void *AudioPlayHandlerThread(void *dummy)

 Debug(3, "audio: ----> %dms %d start\n", (AudioUsedBytes() * 1000)
 / (!AudioRing[AudioRingWrite].HwSampleRate + !AudioRing[AudioRingWrite].HwChannels +
-AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),AudioUsedBytes());
+AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
+AudioUsedBytes());

 do {
 int filled;
@@ -2291,7 +2291,7 @@ void AudioEnqueue(const void *samples, int count)
 // no lock needed, can wakeup next time
 AudioRunning = 1;
 pthread_cond_signal(&AudioStartCond);
-Debug(3, "Start on AudioEnque Threshold %d n %d\n",AudioStartThreshold,n);
+Debug(3, "Start on AudioEnque Threshold %d n %d\n", AudioStartThreshold, n);
 }
 }
 // Update audio clock (stupid gcc developers thinks INT64_C is unsigned)
@@ -2337,6 +2337,7 @@ void AudioVideoReady(int64_t pts)

 if (!AudioRunning) {
 int skip;
+
 // buffer ~15 video frames
 // FIXME: HDTV can use smaller video buffer
 skip = pts - 0 * 20 * 90 - AudioBufferTime * 90 - audio_pts + VideoAudioDelay;
@@ -2479,7 +2480,7 @@ int64_t AudioGetDelay(void)
 pts += ((int64_t) RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer)
 * 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate * AudioRing[AudioRingRead].HwChannels *
 AudioBytesProSample);
-Debug(4,"audio: hw+sw delay %zd %" PRId64 "ms\n", RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer),
+Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n", RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer),
 pts / 90);

 return pts;
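The AudioGetDelay() lines above only gain whitespace in this commit, but the unit conversion they perform is easy to misread: bytes buffered in the audio ring are turned into a delay in 90 kHz PTS ticks. A minimal sketch of that arithmetic (the helper name and the worked numbers are illustrative, not from the source):

#include <stdint.h>
#include <stddef.h>

/* Sketch only: buffered ring bytes -> PTS delay in 90 kHz ticks.
 * Mirrors the "* 90 * 1000 / (rate * channels * bytes_per_sample)" expression
 * in AudioGetDelay(); AudioBytesProSample is 2 in audio.c (16-bit samples). */
static int64_t buffered_bytes_to_pts(size_t used_bytes, int sample_rate, int channels, int bytes_per_sample)
{
    return ((int64_t)used_bytes * 90 * 1000) / ((int64_t)sample_rate * channels * bytes_per_sample);
}

/* Example: 19200 bytes of 48 kHz stereo 16-bit audio
 *   19200 / (48000 * 2 * 2) = 0.1 s  ->  9000 ticks at 90 kHz. */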
codec.c (8 changes)

@@ -96,6 +96,7 @@ static pthread_mutex_t CodecLockMutex;
 /// Flag prefer fast channel switch
 char CodecUsePossibleDefectFrames;
 AVBufferRef *hw_device_ctx;
+
 //----------------------------------------------------------------------------
 // Video
 //----------------------------------------------------------------------------
@@ -288,8 +289,8 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
 }
 decoder->VideoCtx->hw_device_ctx = av_buffer_ref(HwDeviceContext);
 #else
-decoder->VideoCtx->pix_fmt = AV_PIX_FMT_DRM_PRIME; /* request a DRM frame
+decoder->VideoCtx->pix_fmt = AV_PIX_FMT_DRM_PRIME; /* request a DRM frame */
 // decoder->VideoCtx->pix_fmt = AV_PIX_FMT_MMAL; /* request a DRM frame */
 #endif

 // FIXME: for software decoder use all cpus, otherwise 1
@@ -1111,8 +1112,6 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder, const AVPac
 return 0;
 }

-
-
 #if defined(USE_SWRESAMPLE) || defined(USE_AVRESAMPLE)

 /**
@@ -1346,6 +1345,7 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
 if (audio_decoder->Resample) {
 uint8_t outbuf[8192 * 2 * 8];
 uint8_t *out[1];
+
 out[0] = outbuf;
 ret =
 swr_convert(audio_decoder->Resample, out, sizeof(outbuf) / (2 * audio_decoder->HwChannels),
drm.c (5 changes)

@@ -475,9 +475,8 @@ void VideoInitDrm()

 void get_drm_aspect(int *num,int *den)
 {
-Debug(3,"mmHeight %d mmWidth %d VideoHeight %d VideoWidth %d\n",render->mmHeight,render->mmWidth,VideoWindowHeight,VideoWindowWidth);
-*num = VideoWindowWidth * render->mmHeight;
-*den = VideoWindowHeight * render->mmWidth;
+*num = VideoWindowWidth;
+*den = VideoWindowHeight;
 }

 struct gbm_bo *bo = NULL, *next_bo=NULL;
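This drm.c hunk is the core of the reworked aspect function: the old code folded the connector's physical dimensions (mmWidth/mmHeight) into the ratio to compensate for non-square pixels, while the new code assumes square pixels and reports the window size unchanged. A minimal sketch of the two behaviours, with the plugin globals passed in as parameters for clarity (function names are illustrative, not from the source):

/* Sketch only: before/after semantics of get_drm_aspect(). */
static void drm_aspect_old(int win_w, int win_h, int mm_w, int mm_h, int *num, int *den)
{
    /* old: scale by the panel's physical size, yielding a true display aspect ratio */
    *num = win_w * mm_h;
    *den = win_h * mm_w;
}

static void drm_aspect_new(int win_w, int win_h, int *num, int *den)
{
    /* new: assume square pixels; the ratio is simply the window size */
    *num = win_w;
    *den = win_h;
}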
hdr.c (1 change)

@@ -490,4 +490,3 @@ static void set_hdr_metadata(int color,int trc, AVFrameSideData *sd1, AVFrameSid
 Debug(3,"DRM: HDR metadata: prop set\n");
-
 }

openglosd.cpp (807 changes): file diff suppressed because it is too large.
po/de_DE.po (40 changes)

@@ -7,7 +7,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: VDR \n"
 "Report-Msgid-Bugs-To: <see README>\n"
-"POT-Creation-Date: 2019-10-26 18:41+0200\n"
+"POT-Creation-Date: 2020-04-13 16:26+0200\n"
 "PO-Revision-Date: blabla\n"
 "Last-Translator: blabla\n"
 "Language-Team: blabla\n"
@@ -263,9 +263,6 @@ msgstr ""
 msgid "codec: can't allocate video codec context\n"
 msgstr ""

-msgid "VAAPI Refcounts invalid\n"
-msgstr ""
-
 msgid "codec: can't set option deint to video codec!\n"
 msgstr ""

@@ -306,24 +303,6 @@ msgstr ""
 msgid "codec/audio: decoded data smaller than encoded\n"
 msgstr ""

-msgid "codec/audio: resample setup error\n"
-msgstr ""
-
-msgid "codec/audio: overwrite resample\n"
-msgstr ""
-
-msgid "codec/audio: AvResample setup error\n"
-msgstr ""
-
-msgid "codec: latm\n"
-msgstr ""
-
-msgid "codec: error audio data\n"
-msgstr ""
-
-msgid "codec: error more than one frame data\n"
-msgstr ""
-
 msgid "codec/audio: can't setup resample\n"
 msgstr ""

@@ -642,7 +621,7 @@ msgid " Frames missed(%d) duped(%d) dropped(%d) total(%d)"
 msgstr " Frames verloren(%d) verdoppelt(%d) übersprungen(%d) Gesamt(%d)"

 #, c-format
-msgid " Frame Process time %2.2fms"
+msgid " Video %dx%d Color: %s Gamma: %s"
 msgstr ""

 msgid "pass-through disabled"
@@ -807,10 +786,6 @@ msgstr ""
 msgid "video/glx: no GLX support\n"
 msgstr ""

-#, c-format
-msgid "video/glx: glx version %d.%d\n"
-msgstr ""
-
 msgid "did not get FBconfig"
 msgstr ""

@@ -898,11 +873,10 @@ msgid "Failed rendering frame!\n"
 msgstr ""

 #, c-format
-msgid "video/vdpau: output buffer full, dropping frame (%d/%d)\n"
+msgid "video/cuvid: output buffer full, dropping frame (%d/%d)\n"
 msgstr ""

-#, c-format
-msgid "video/vdpau: pixel format %d not supported\n"
+msgid "Could not dynamically load CUDA\n"
 msgstr ""

 msgid "Kein Cuda device gefunden"
@@ -943,12 +917,6 @@ msgstr ""
 msgid "video/egl: can't create thread egl context\n"
 msgstr ""

-msgid "video: can't queue cancel video display thread\n"
-msgstr ""
-
-msgid "video: can't cancel video display thread\n"
-msgstr ""
-
 #, c-format
 msgid "video: repeated pict %d found, but not handled\n"
 msgstr ""
shaders.h (51 changes)

@@ -11,7 +11,6 @@ const char *gl_version = "#version 300 es ";
 #endif
 #endif

-
 /* Color conversion matrix: RGB = m * YUV + c
 * m is in row-major matrix, with m[row][col], e.g.:
 * [ a11 a12 a13 ] float m[3][3] = { { a11, a12, a13 },
@@ -71,16 +70,11 @@ float cms_matrix[3][3] = { {1.660497, -0.124547, -0.018154},
 };

 // Common constants for SMPTE ST.2084 (PQ)
-static const float PQ_M1 = 2610./4096 * 1./4,
-PQ_M2 = 2523./4096 * 128,
-PQ_C1 = 3424./4096,
-PQ_C2 = 2413./4096 * 32,
-PQ_C3 = 2392./4096 * 32;
+static const float PQ_M1 = 2610. / 4096 * 1. / 4, PQ_M2 = 2523. / 4096 * 128, PQ_C1 = 3424. / 4096, PQ_C2 =
+2413. / 4096 * 32, PQ_C3 = 2392. / 4096 * 32;

 // Common constants for ARIB STD-B67 (HLG)
-static const float HLG_A = 0.17883277,
-HLG_B = 0.28466892,
-HLG_C = 0.55991073;
+static const float HLG_A = 0.17883277, HLG_B = 0.28466892, HLG_C = 0.55991073;

 struct gl_vao_entry
 {
@@ -124,43 +118,50 @@ static const struct gl_vao_entry vertex_vao[] = {
 char sh[SHADER_LENGTH];
 char shv[SHADER_LENGTH];

-GL_init() {
+GL_init()
+{
 sh[0] = 0;
 }
-GLV_init() {
+GLV_init()
+{
 shv[0] = 0;
 }
-pl_shader_append(const char *fmt, ...) {
+pl_shader_append(const char *fmt, ...)
+{
 char temp[1000];
 va_list ap;

 va_start(ap, fmt);
-vsprintf(temp,fmt,ap);
+vsprintf(temp, fmt, ap);
 va_end(ap);

 if (strlen(sh) + strlen(temp) > SHADER_LENGTH)
 Fatal(_("Shaderlenght fault\n"));
-strcat(sh,temp);
+strcat(sh, temp);

 }

-pl_shader_append_v(const char *fmt, ...) {
+pl_shader_append_v(const char *fmt, ...)
+{
 char temp[1000];
 va_list ap;

 va_start(ap, fmt);
-vsprintf(temp,fmt,ap);
+vsprintf(temp, fmt, ap);
 va_end(ap);

 if (strlen(shv) + strlen(temp) > SHADER_LENGTH)
 Fatal(_("Shaderlenght fault\n"));
-strcat(shv,temp);
+strcat(shv, temp);

 }

 static void compile_attach_shader(GLuint program, GLenum type, const char *source)
 {
 GLuint shader;
-GLint status=1234, log_length;
+GLint status = 1234, log_length;
 char log[4000];
 GLsizei len;

@@ -199,7 +200,7 @@ static GLuint sc_generate_osd(GLuint gl_prog)
 gl_prog = glCreateProgram();

 GL_init();
-GLSL("%s\n",gl_version);
+GLSL("%s\n", gl_version);
 GLSL("in vec2 vertex_position;\n");
 GLSL("in vec2 vertex_texcoord0;\n");
 GLSL("out vec2 texcoord0;\n");
@@ -211,7 +212,7 @@ static GLuint sc_generate_osd(GLuint gl_prog)
 Debug(3, "vor compile vertex osd\n");
 compile_attach_shader(gl_prog, GL_VERTEX_SHADER, sh); // vertex_osd);
 GL_init();
-GLSL("%s\n",gl_version);
+GLSL("%s\n", gl_version);
 GLSL("#define texture1D texture\n");
 GLSL("precision mediump float; \n");
 GLSL("layout(location = 0) out vec4 out_color;\n");
@@ -247,7 +248,7 @@ static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)
 char *frag;

 GL_init();
-GLSL("%s\n",gl_version);
+GLSL("%s\n", gl_version);
 GLSL("in vec2 vertex_position; \n");
 GLSL("in vec2 vertex_texcoord0; \n");
 GLSL("out vec2 texcoord0; \n");
@@ -270,8 +271,7 @@ static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)
 gl_prog = glCreateProgram();
 Debug(3, "vor compile vertex\n");
 // printf("%s",sh);
-compile_attach_shader(gl_prog, GL_VERTEX_SHADER, sh );
+compile_attach_shader(gl_prog, GL_VERTEX_SHADER, sh);
-

 switch (colorspace) {
 case AVCOL_SPC_RGB:
@@ -300,7 +300,7 @@ static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)

 GL_init();

-GLSL("%s\n",gl_version);
+GLSL("%s\n", gl_version);
 GLSL("precision mediump float; \n");
 GLSL("layout(location = 0) out vec4 out_color;\n");
 GLSL("in vec2 texcoord0; \n");
@@ -348,8 +348,7 @@ static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)
 #endif
 GLSL("out_color = color; \n");
 GLSL("} \n");
-}
-else {
+} else {

 GLSL("color.r = 1.000000 * vec4(texture(texture0, texcoord0)).r; \n");
 if (Planes == 3) {
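The PQ_* and HLG_* initializers above are only re-wrapped by this commit; they are the standard coefficients of SMPTE ST.2084 (PQ) and ARIB STD-B67 (HLG). For reference, a sketch (not part of the commit) of the PQ encoding that those constants parameterize:

#include <math.h>

/* Sketch only: SMPTE ST.2084 (PQ) encoding of a normalized linear value y
 * (0..1, where 1.0 corresponds to 10000 cd/m^2), using the same constant
 * expressions as shaders.h above. */
static float pq_encode(float y)
{
    const float m1 = 2610. / 4096 * 1. / 4;   /* PQ_M1 */
    const float m2 = 2523. / 4096 * 128;      /* PQ_M2 */
    const float c1 = 3424. / 4096;            /* PQ_C1 */
    const float c2 = 2413. / 4096 * 32;       /* PQ_C2 */
    const float c3 = 2392. / 4096 * 32;       /* PQ_C3 */
    float p = powf(y, m1);

    return powf((c1 + c2 * p) / (1.0f + c3 * p), m2);
}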
@@ -1096,10 +1096,10 @@ void cMenuSetupSoft::Create(void)
 "auto", "1920x1080", "1280x720", "custom",
 };
 static const char *const video_display_formats_4_3[] = {
-"pan&scan", "letterbox", "center cut-out",
+"pan&scan", "letterbox", "center cut-out", "original"
 };
 static const char *const video_display_formats_16_9[] = {
-"pan&scan", "pillarbox", "center cut-out",
+"pan&scan", "pillarbox", "center cut-out", "original"
 };
 #ifdef YADIF
 static const char *const deinterlace[] = {
@@ -1133,7 +1133,7 @@ void cMenuSetupSoft::Create(void)
 static char *scalingtest[100];

 if (scalers == 0) {
-scalingtest[0] = (char *) "Off";
+scalingtest[0] = (char *)"Off";
 for (scalers = 0; pl_named_filters[scalers].filter != NULL; scalers++) {
 scaling[scalers] = (char *)pl_named_filters[scalers].name;
 scalingtest[scalers + 1] = (char *)pl_named_filters[scalers].name;
@@ -1182,9 +1182,9 @@ void cMenuSetupSoft::Create(void)
 Add(new cMenuEditBoolItem(tr("Enable Screensaver(DPMS) at black screen"), &EnableDPMSatBlackScreen,
 trVDR("no"), trVDR("yes")));
 #endif
-Add(new cMenuEditStraItem(trVDR("4:3 video display format"), &Video4to3DisplayFormat, 3,
+Add(new cMenuEditStraItem(trVDR("4:3 video display format"), &Video4to3DisplayFormat, 4,
 video_display_formats_4_3));
-Add(new cMenuEditStraItem(trVDR("16:9+other video display format"), &VideoOtherDisplayFormat, 3,
+Add(new cMenuEditStraItem(trVDR("16:9+other video display format"), &VideoOtherDisplayFormat, 4,
 video_display_formats_16_9));

 #if 0
@@ -2153,7 +2153,7 @@ void cSoftHdMenu::Create(void)
 int dropped;
 int counter;
 float frametime;
-int width,height;
+int width, height;
 int color;
 int eotf;
 char *colorstr, *eotfstr;
@@ -2191,7 +2191,7 @@ void cSoftHdMenu::Create(void)
 #endif
 Add(new cOsdItem(NULL, osUnknown, false));
 Add(new cOsdItem(NULL, osUnknown, false));
-GetStats(&missed, &duped, &dropped, &counter, &frametime, &width, &height, &color,&eotf);
+GetStats(&missed, &duped, &dropped, &counter, &frametime, &width, &height, &color, &eotf);
 switch (color) {
 case AVCOL_SPC_RGB:
 colorstr = strdup("BT 601");
@@ -2213,7 +2213,8 @@ void cSoftHdMenu::Create(void)
 }
 Add(new cOsdItem(cString::sprintf(tr(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"), missed, duped, dropped,
 counter), osUnknown, false));
-Add(new cOsdItem(cString::sprintf(tr(" Video %dx%d Color: %s Gamma: %s"), width, height, colorstr, eotfstr), osUnknown, false));
+Add(new cOsdItem(cString::sprintf(tr(" Video %dx%d Color: %s Gamma: %s"), width, height, colorstr, eotfstr),
+osUnknown, false));
 // Add(new cOsdItem(cString::sprintf(tr(" Frame Process time %2.2fms"), frametime), osUnknown, false));
 SetCurrent(Get(current)); // restore selected menu entry
 Display(); // display build menu
softhddev.c (15 changes)

@@ -645,7 +645,7 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size, int is_st
 q = pesdx->Buffer + pesdx->Skip;
 n = pesdx->Index - pesdx->Skip;
 while (n >= 5) {
-int r=0;
+int r = 0;
 unsigned codec_id = AV_CODEC_ID_NONE;

 // 4 bytes 0xFFExxxxx Mpeg audio
@@ -2871,11 +2871,10 @@ const char *CommandLineHelp(void)
 " -p device\taudio device for pass-through (hw:0,1 or /dev/dsp1)\n"
 " -c channel\taudio mixer channel name (fe. PCM)\n" " -d display\tdisplay of x11 server (fe. :0.0)\n"
 " -f\t\tstart with fullscreen window (only with window manager)\n"
-" -g geometry\tx11 window geometry wxh+x+y\n"
-" -r Refresh\tRefreshrate for DRM (default is 50 Hz)\n"
+" -g geometry\tx11 window geometry wxh+x+y\n" " -r Refresh\tRefreshrate for DRM (default is 50 Hz)\n"
 " -C Connector\tConnector for DRM (default is current Connector)\n"
-" -v device\tvideo driver device (cuvid)\n"
-" -s\t\tstart in suspended mode\n" " -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
+" -v device\tvideo driver device (cuvid)\n" " -s\t\tstart in suspended mode\n"
+" -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
 " -X args\tX11 server arguments (f.e. -nocursor)\n" " -w workaround\tenable/disable workarounds\n"
 "\tno-hw-decoder\t\tdisable hw decoder, use software decoder only\n"
 "\tno-mpeg-hw-decoder\tdisable hw decoder for mpeg only\n"
@@ -3369,7 +3368,8 @@ void Resume(void)
 ** @param[out] dropped dropped frames
 ** @param[out] count number of decoded frames
 */
-void GetStats(int *missed, int *duped, int *dropped, int *counter, float *frametime, int *width, int *height, int *color, int *eotf)
+void GetStats(int *missed, int *duped, int *dropped, int *counter, float *frametime, int *width, int *height,
+int *color, int *eotf)
 {
 *missed = 0;
 *duped = 0;
@@ -3381,7 +3381,8 @@ void GetStats(int *missed, int *duped, int *dropped, int *counter, float *framet
 *color = NULL;
 *eotf = NULL;
 if (MyVideoStream->HwDecoder) {
-VideoGetStats(MyVideoStream->HwDecoder, missed, duped, dropped, counter, frametime, width, height, color, eotf);
+VideoGetStats(MyVideoStream->HwDecoder, missed, duped, dropped, counter, frametime, width, height, color,
+eotf);
 }
 }

video.c (408 changes)

@@ -253,7 +253,7 @@ typedef enum _video_zoom_modes_
 VideoNormal, ///< normal
 VideoStretch, ///< stretch to all edges
 VideoCenterCutOut, ///< center and cut out
-VideoAnamorphic, ///< anamorphic scaled (unsupported)
+VideoNone, ///< no scaling
 } VideoZoomModes;

 ///
@@ -495,7 +495,6 @@ static int GlxVSyncEnabled = 1; ///< enable/disable v-sync
 static GLXContext glxSharedContext; ///< shared gl context
 static GLXContext glxContext; ///< our gl context

-
 static GLXContext glxThreadContext; ///< our gl context for the thread

 static XVisualInfo *GlxVisualInfo; ///< our gl visual
@@ -543,7 +542,6 @@ static void X11DPMSReenable(xcb_connection_t *);
 static void X11DPMSDisable(xcb_connection_t *);
 #endif

-
 char *eglErrorString(EGLint error)
 {
 switch (error) {
@@ -681,44 +679,35 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
 AVRational display_aspect_ratio;
 AVRational tmp_ratio;

+// input not initialized yet, return immediately
 if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
-input_aspect_ratio.num = 1;
-input_aspect_ratio.den = 1;
-Debug(3, "video: aspect defaults to %d:%d\n", input_aspect_ratio.num, input_aspect_ratio.den);
+output_width = video_width;
+output_height = video_height;
+return;

-}
-
-av_reduce(&input_aspect_ratio.num, &input_aspect_ratio.den, input_width * input_aspect_ratio.num,
-input_height * input_aspect_ratio.den, 1024 * 1024);
-
-// InputWidth/Height can be zero = uninitialized
-if (!input_aspect_ratio.num || !input_aspect_ratio.den) {
-input_aspect_ratio.num = 1;
-input_aspect_ratio.den = 1;
 }
 #ifdef USE_DRM
-get_drm_aspect(&display_aspect_ratio.num,&display_aspect_ratio.den);
+get_drm_aspect(&display_aspect_ratio.num, &display_aspect_ratio.den);
 #else
-Debug(3,"mmHeight %d mm Width %d VideoHeight %d VideoWidth %d\n",VideoScreen->height_in_millimeters,VideoScreen->width_in_millimeters,
-VideoScreen->height_in_pixels,VideoScreen->width_in_pixels);
-display_aspect_ratio.num = VideoScreen->width_in_pixels * VideoScreen->height_in_millimeters;
-display_aspect_ratio.den = VideoScreen->height_in_pixels * VideoScreen->width_in_millimeters;
+display_aspect_ratio.num = VideoScreen->width_in_pixels;
+display_aspect_ratio.den = VideoScreen->height_in_pixels;
 #endif
-display_aspect_ratio = av_mul_q(input_aspect_ratio, display_aspect_ratio);
-Debug(3, "video: aspect %d:%d Resolution %d\n", display_aspect_ratio.num, display_aspect_ratio.den, resolution);
+av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, display_aspect_ratio.num, display_aspect_ratio.den,
+1024 * 1024);

+Debug(3, "video: input %dx%d (%d:%d)\n", input_width, input_height, input_aspect_ratio.num,
+input_aspect_ratio.den);
+Debug(3, "video: display aspect %d:%d Resolution %d\n", display_aspect_ratio.num, display_aspect_ratio.den,
+resolution);
+Debug(3, "video: video %+d%+d %dx%d\n", video_x, video_y, video_width, video_height);

 *crop_x = VideoCutLeftRight[resolution];
 *crop_y = VideoCutTopBottom[resolution];
 *crop_width = input_width - VideoCutLeftRight[resolution] * 2;
 *crop_height = input_height - VideoCutTopBottom[resolution] * 2;
+CuvidMessage(2, "video: crop to %+d%+d %dx%d\n", *crop_x, *crop_y, *crop_width, *crop_height);

-// FIXME: store different positions for the ratios
 tmp_ratio.num = 4;
 tmp_ratio.den = 3;
-#ifdef DEBUG
-Debug(4, "ratio: %d:%d %d:%d\n", input_aspect_ratio.num, input_aspect_ratio.den, display_aspect_ratio.num,
-display_aspect_ratio.den);
-#endif
 if (!av_cmp_q(input_aspect_ratio, tmp_ratio)) {
 switch (Video4to3ZoomMode) {
 case VideoNormal:
@@ -727,9 +716,8 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
 goto stretch;
 case VideoCenterCutOut:
 goto center_cut_out;
-case VideoAnamorphic:
-// FIXME: rest should be done by hardware
-goto stretch;
+case VideoNone:
+goto video_none;
 }
 }
 switch (VideoOtherZoomMode) {
@@ -739,28 +727,24 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
 goto stretch;
 case VideoCenterCutOut:
 goto center_cut_out;
-case VideoAnamorphic:
-// FIXME: rest should be done by hardware
-goto stretch;
+case VideoNone:
+goto video_none;
 }

 normal:
 *output_x = video_x;
 *output_y = video_y;
-*output_width =
-(video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
-*output_height =
-(video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
+*output_height = video_height;
+*output_width = (*crop_width * *output_height * input_aspect_ratio.num) / (input_aspect_ratio.den * *crop_height);
 if (*output_width > video_width) {
 *output_width = video_width;
+*output_height =
+(*crop_height * *output_width * input_aspect_ratio.den) / (input_aspect_ratio.num * *crop_width);
 *output_y += (video_height - *output_height) / 2;
-} else if (*output_height > video_height) {
-*output_height = video_height;
+} else if (*output_width < video_width) {
 *output_x += (video_width - *output_width) / 2;
 }
-CuvidMessage(2, "video: normal aspect output %dx%d%+d%+d Video %dx%d\n", *output_width, *output_height, *output_x,
-*output_y, video_width, video_height);
+CuvidMessage(2, "video: normal aspect output %dx%d%+d%+d\n", *output_width, *output_height, *output_x, *output_y);
 return;

 stretch:
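The reworked normal: path no longer derives the output size from the display aspect ratio; it fills the window height and computes the width from the cropped source and the input aspect ratio, swapping the roles when the result would be wider than the window. A minimal sketch of just that sizing step (positioning offsets omitted; the helper and parameter names are illustrative):

/* Sketch only: the "normal" output sizing from the hunk above.
 * aspect_num/aspect_den is the input aspect ratio as passed to VideoUpdateOutput(). */
static void size_normal(int video_width, int video_height, int crop_width, int crop_height,
    int aspect_num, int aspect_den, int *output_width, int *output_height)
{
    *output_height = video_height;
    *output_width = (crop_width * *output_height * aspect_num) / (aspect_den * crop_height);
    if (*output_width > video_width) {
        /* too wide: clamp to the window width and rescale the height instead */
        *output_width = video_width;
        *output_height = (crop_height * *output_width * aspect_den) / (aspect_num * crop_width);
    }
}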
@ -768,49 +752,34 @@ static void VideoUpdateOutput(AVRational input_aspect_ratio, int input_width, in
|
|||||||
*output_y = video_y;
|
*output_y = video_y;
|
||||||
*output_width = video_width;
|
*output_width = video_width;
|
||||||
*output_height = video_height;
|
*output_height = video_height;
|
||||||
Debug(3, "video: stretch output %dx%d%+d%+d\n", *output_width, *output_height, *output_x, *output_y);
|
CuvidMessage(2, "video: stretch output %dx%d%+d%+d\n", *output_width, *output_height, *output_x, *output_y);
|
||||||
return;
|
return;
|
||||||
|
|
||||||
center_cut_out:
|
center_cut_out:
|
||||||
*output_x = video_x;
|
*output_x = video_x;
|
||||||
*output_y = video_y;
|
*output_y = video_y;
|
||||||
*output_height = video_height;
|
*output_height = video_height;
|
||||||
|
*output_width = (*crop_width * *output_height * input_aspect_ratio.num) / (input_aspect_ratio.den * *crop_height);
|
||||||
|
if (*output_width > video_width) {
|
||||||
|
// fix height cropping
|
||||||
|
*crop_width = (int)((*crop_width * video_width) / (*output_width * 2.0) + 0.5) * 2;
|
||||||
|
*crop_x = (input_width - *crop_width) / 2;
|
||||||
|
*output_width = video_width;
|
||||||
|
} else if (*output_width < video_width) {
|
||||||
|
// fix width cropping
|
||||||
|
*crop_height = (int)((*crop_height * *output_width) / (video_width * 2.0) + 0.5) * 2;
|
||||||
|
*crop_y = (input_height - *crop_height) / 2;
|
||||||
*output_width = video_width;
|
*output_width = video_width;
|
||||||
|
|
||||||
*crop_width = (video_height * display_aspect_ratio.num + display_aspect_ratio.den - 1) / display_aspect_ratio.den;
|
|
||||||
*crop_height = (video_width * display_aspect_ratio.den + display_aspect_ratio.num - 1) / display_aspect_ratio.num;
|
|
||||||
|
|
||||||
// look which side must be cut
|
|
||||||
if (*crop_width > video_width) {
|
|
||||||
int tmp;
|
|
||||||
|
|
||||||
*crop_height = input_height - VideoCutTopBottom[resolution] * 2;
|
|
||||||
|
|
||||||
// adjust scaling
|
|
||||||
tmp = ((*crop_width - video_width) * input_width) / (2 * video_width);
|
|
||||||
// FIXME: round failure?
|
|
||||||
if (tmp > *crop_x) {
|
|
||||||
*crop_x = tmp;
|
|
||||||
}
|
}
|
||||||
*crop_width = input_width - *crop_x * 2;
|
CuvidMessage(2, "video: aspect crop %dx%d%+d%+d\n", *crop_width, *crop_height, *crop_x, *crop_y);
|
||||||
} else if (*crop_height > video_height) {
|
return;
|
||||||
int tmp;
|
|
||||||
|
|
||||||
*crop_width = input_width - VideoCutLeftRight[resolution] * 2;
|
video_none:
|
||||||
|
*output_height = *crop_height;
|
||||||
// adjust scaling
|
*output_width = (*crop_width * input_aspect_ratio.num) / input_aspect_ratio.den; // normalize pixel aspect ratio
|
||||||
tmp = ((*crop_height - video_height) * input_height)
|
*output_x = video_x + (video_width - *output_width) / 2;
|
||||||
/ (2 * video_height);
|
*output_y = video_y + (video_height - *output_height) / 2;
|
||||||
// FIXME: round failure?
|
CuvidMessage(2, "video: original aspect output %dx%d%+d%+d\n", *output_width, *output_height, *output_x, *output_y);
|
||||||
if (tmp > *crop_y) {
|
|
||||||
*crop_y = tmp;
|
|
||||||
}
|
|
||||||
*crop_height = input_height - *crop_y * 2;
|
|
||||||
} else {
|
|
||||||
*crop_width = input_width - VideoCutLeftRight[resolution] * 2;
|
|
||||||
*crop_height = input_height - VideoCutTopBottom[resolution] * 2;
|
|
||||||
}
|
|
||||||
Debug(3, "video: aspect crop %dx%d%+d%+d\n", *crop_width, *crop_height, *crop_x, *crop_y);
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -873,9 +842,6 @@ static PFNGLXSWAPINTERVALSGIPROC GlxSwapIntervalSGI;
|
|||||||
}\
|
}\
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
///
|
///
|
||||||
/// GLX check if a GLX extension is supported.
|
/// GLX check if a GLX extension is supported.
|
||||||
///
|
///
|
||||||
@ -974,7 +940,6 @@ static void EglInit(void)
|
|||||||
|
|
||||||
XVisualInfo *vi = NULL;
|
XVisualInfo *vi = NULL;
|
||||||
|
|
||||||
|
|
||||||
#ifdef PLACEBO
|
#ifdef PLACEBO
|
||||||
return;
|
return;
|
||||||
#endif
|
#endif
|
||||||
@ -1013,7 +978,7 @@ static void EglInit(void)
|
|||||||
if (!glXQueryVersion(XlibDisplay, &major, &minor)) {
|
if (!glXQueryVersion(XlibDisplay, &major, &minor)) {
|
||||||
Fatal(_("video/glx: no GLX support\n"));
|
Fatal(_("video/glx: no GLX support\n"));
|
||||||
}
|
}
|
||||||
Debug(3,"video/glx: glx version %d.%d\n", major, minor);
|
Debug(3, "video/glx: glx version %d.%d\n", major, minor);
|
||||||
|
|
||||||
//
|
//
|
||||||
// check which extension are supported
|
// check which extension are supported
|
||||||
@ -1159,12 +1124,13 @@ static void EglInit(void)
|
|||||||
EGLContext context;
|
EGLContext context;
|
||||||
|
|
||||||
// create egl context
|
// create egl context
|
||||||
setenv("MESA_GL_VERSION_OVERRIDE","3.3",0);
|
setenv("MESA_GL_VERSION_OVERRIDE", "3.3", 0);
|
||||||
setenv("V3D_DOUBLE_BUFFER","1",0);
|
setenv("V3D_DOUBLE_BUFFER", "1", 0);
|
||||||
make_egl();
|
make_egl();
|
||||||
|
|
||||||
if (!glewdone) {
|
if (!glewdone) {
|
||||||
GLenum err = glewInit();
|
GLenum err = glewInit();
|
||||||
|
|
||||||
glewdone = 1;
|
glewdone = 1;
|
||||||
if (err != GLEW_OK) {
|
if (err != GLEW_OK) {
|
||||||
Debug(3, "Error: %s\n", glewGetErrorString(err));
|
Debug(3, "Error: %s\n", glewGetErrorString(err));
|
||||||
@ -1509,7 +1475,8 @@ int CuvidMessage(int level, const char *format, ...)
|
|||||||
static inline void __checkCudaErrors(CUresult err, const char *file, const int line)
|
static inline void __checkCudaErrors(CUresult err, const char *file, const int line)
|
||||||
{
|
{
|
||||||
if (CUDA_SUCCESS != err) {
|
if (CUDA_SUCCESS != err) {
|
||||||
CuvidMessage(2, "checkCudaErrors() Driver API error = %04d >%s< from file <%s>, line %i.\n", err, getCudaDrvErrorString(err), file, line);
|
CuvidMessage(2, "checkCudaErrors() Driver API error = %04d >%s< from file <%s>, line %i.\n", err,
|
||||||
|
getCudaDrvErrorString(err), file, line);
|
||||||
exit(EXIT_FAILURE);
|
exit(EXIT_FAILURE);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1589,13 +1556,13 @@ static void CuvidDestroySurfaces(CuvidDecoder * decoder)
|
|||||||
checkCudaErrors(cu->cuGraphicsUnregisterResource(decoder->cu_res[i][j]));
|
checkCudaErrors(cu->cuGraphicsUnregisterResource(decoder->cu_res[i][j]));
|
||||||
#endif
|
#endif
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
if (decoder->images[i*Planes+j]) {
|
if (decoder->images[i * Planes + j]) {
|
||||||
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[i*Planes+j]);
|
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[i * Planes + j]);
|
||||||
if (decoder->fds[i*Planes+j])
|
if (decoder->fds[i * Planes + j])
|
||||||
close(decoder->fds[i*Planes+j]);
|
close(decoder->fds[i * Planes + j]);
|
||||||
}
|
}
|
||||||
decoder->fds[i*Planes+j] = 0;
|
decoder->fds[i * Planes + j] = 0;
|
||||||
decoder->images[i*Planes+j] = 0;
|
decoder->images[i * Planes + j] = 0;
|
||||||
#endif
|
#endif
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
@ -1604,7 +1571,7 @@ static void CuvidDestroySurfaces(CuvidDecoder * decoder)
|
|||||||
pl_renderer_destroy(&p->renderer);
|
pl_renderer_destroy(&p->renderer);
|
||||||
p->renderer = pl_renderer_create(p->ctx, p->gpu);
|
p->renderer = pl_renderer_create(p->ctx, p->gpu);
|
||||||
#else
|
#else
|
||||||
glDeleteTextures(CODEC_SURFACES_MAX * 2, (GLuint *) &decoder->gl_textures);
|
glDeleteTextures(CODEC_SURFACES_MAX * 2, (GLuint *) & decoder->gl_textures);
|
||||||
GlxCheck();
|
GlxCheck();
|
||||||
|
|
||||||
if (CuvidDecoderN == 1) { // only wenn last decoder closes
|
if (CuvidDecoderN == 1) { // only wenn last decoder closes
|
||||||
@ -1688,24 +1655,24 @@ static void CuvidReleaseSurface(CuvidDecoder * decoder, int surface)
|
|||||||
}
|
}
|
||||||
#else
|
#else
|
||||||
#ifdef VAAPI
|
#ifdef VAAPI
|
||||||
if (decoder->images[surface*Planes]) {
|
if (decoder->images[surface * Planes]) {
|
||||||
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface*Planes]);
|
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface * Planes]);
|
||||||
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface*Planes+1]);
|
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface * Planes + 1]);
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface*Planes+2]);
|
DestroyImageKHR(eglGetCurrentDisplay(), decoder->images[surface * Planes + 2]);
|
||||||
#endif
|
#endif
|
||||||
if (decoder->fds[surface*Planes]) {
|
if (decoder->fds[surface * Planes]) {
|
||||||
close(decoder->fds[surface*Planes]);
|
close(decoder->fds[surface * Planes]);
|
||||||
// close(decoder->fds[surface*Planes+1]);
|
// close(decoder->fds[surface*Planes+1]);
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
close(decoder->fds[surface*Planes+2]);
|
close(decoder->fds[surface * Planes + 2]);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
decoder->fds[surface*Planes] = 0;
|
decoder->fds[surface * Planes] = 0;
|
||||||
decoder->fds[surface*Planes+1] = 0;
|
decoder->fds[surface * Planes + 1] = 0;
|
||||||
decoder->images[surface*Planes] = 0;
|
decoder->images[surface * Planes] = 0;
|
||||||
decoder->images[surface*Planes+1] = 0;
|
decoder->images[surface * Planes + 1] = 0;
|
||||||
#endif
|
#endif
|
||||||
#endif
|
#endif
|
||||||
for (i = 0; i < decoder->SurfaceUsedN; ++i) {
|
for (i = 0; i < decoder->SurfaceUsedN; ++i) {
|
||||||
@ -1735,9 +1702,10 @@ static void CuvidPrintFrames(const CuvidDecoder * decoder)
|
|||||||
|
|
||||||
int CuvidTestSurfaces()
|
int CuvidTestSurfaces()
|
||||||
{
|
{
|
||||||
int i=0;
|
int i = 0;
|
||||||
|
|
||||||
if (CuvidDecoders[0] != NULL) {
|
if (CuvidDecoders[0] != NULL) {
|
||||||
if (i = atomic_read(&CuvidDecoders[0]->SurfacesFilled) < VIDEO_SURFACES_MAX-1)
|
if (i = atomic_read(&CuvidDecoders[0]->SurfacesFilled) < VIDEO_SURFACES_MAX - 1)
|
||||||
return i;
|
return i;
|
||||||
return 0;
|
return 0;
|
||||||
} else
|
} else
|
||||||
@ -1779,14 +1747,14 @@ const int mpgl_preferred_gl_versions[] = {
|
|||||||
0
|
0
|
||||||
};
|
};
|
||||||
|
|
||||||
static bool create_context_cb(EGLDisplay display, int es_version, EGLContext * out_context, EGLConfig * out_config, int *bpp)
|
static bool create_context_cb(EGLDisplay display, int es_version, EGLContext * out_context, EGLConfig * out_config,
|
||||||
|
int *bpp)
|
||||||
{
|
{
|
||||||
|
|
||||||
EGLenum api;
|
EGLenum api;
|
||||||
EGLint rend, *attribs;
|
EGLint rend, *attribs;
|
||||||
const char *name;
|
const char *name;
|
||||||
|
|
||||||
|
|
||||||
switch (es_version) {
|
switch (es_version) {
|
||||||
case 0:
|
case 0:
|
||||||
api = EGL_OPENGL_API;
|
api = EGL_OPENGL_API;
|
||||||
@ -1831,7 +1799,8 @@ static bool create_context_cb(EGLDisplay display, int es_version, EGLContext * o
|
|||||||
EGL_RENDERABLE_TYPE, rend,
|
EGL_RENDERABLE_TYPE, rend,
|
||||||
EGL_NONE
|
EGL_NONE
|
||||||
};
|
};
|
||||||
EGLint num_configs=0;
|
EGLint num_configs = 0;
|
||||||
|
|
||||||
#ifndef RASPI
|
#ifndef RASPI
|
||||||
attribs = attributes10;
|
attribs = attributes10;
|
||||||
*bpp = 10;
|
*bpp = 10;
|
||||||
@ -1909,6 +1878,7 @@ static bool create_context_cb(EGLDisplay display, int es_version, EGLContext * o
|
|||||||
make_egl()
|
make_egl()
|
||||||
{
|
{
|
||||||
int bpp;
|
int bpp;
|
||||||
|
|
||||||
CreateImageKHR = (void *)eglGetProcAddress("eglCreateImageKHR");
|
CreateImageKHR = (void *)eglGetProcAddress("eglCreateImageKHR");
|
||||||
DestroyImageKHR = (void *)eglGetProcAddress("eglDestroyImageKHR");
|
DestroyImageKHR = (void *)eglGetProcAddress("eglDestroyImageKHR");
|
||||||
EGLImageTargetTexture2DOES = (void *)eglGetProcAddress("glEGLImageTargetTexture2DOES");
|
EGLImageTargetTexture2DOES = (void *)eglGetProcAddress("glEGLImageTargetTexture2DOES");
|
||||||
@ -1933,7 +1903,7 @@ make_egl()
|
|||||||
int vID, n;
|
int vID, n;
|
||||||
|
|
||||||
eglGetConfigAttrib(eglDisplay, eglConfig, EGL_NATIVE_VISUAL_ID, &vID);
|
eglGetConfigAttrib(eglDisplay, eglConfig, EGL_NATIVE_VISUAL_ID, &vID);
|
||||||
Debug(3, "chose visual 0x%x bpp %d\n", vID,bpp);
|
Debug(3, "chose visual 0x%x bpp %d\n", vID, bpp);
|
||||||
#ifdef USE_DRM
|
#ifdef USE_DRM
|
||||||
InitBo(bpp);
|
InitBo(bpp);
|
||||||
#else
|
#else
|
||||||
@ -2439,14 +2409,18 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
|
|||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
if (PixFmt == AV_PIX_FMT_NV12)
|
if (PixFmt == AV_PIX_FMT_NV12)
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0,GL_R8 ,n==0?size_x:size_x/2, n==0?size_y:size_y/2, 0, GL_RED , GL_UNSIGNED_BYTE , NULL);
|
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, n == 0 ? size_x : size_x / 2, n == 0 ? size_y : size_y / 2, 0,
|
||||||
|
GL_RED, GL_UNSIGNED_BYTE, NULL);
|
||||||
else
|
else
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0,GL_R16,n==0?size_x:size_x/2, n==0?size_y:size_y/2, 0, GL_RED , GL_UNSIGNED_SHORT, NULL);
|
glTexImage2D(GL_TEXTURE_2D, 0, GL_R16, n == 0 ? size_x : size_x / 2, n == 0 ? size_y : size_y / 2, 0,
|
||||||
|
GL_RED, GL_UNSIGNED_SHORT, NULL);
|
||||||
#else
|
#else
|
||||||
if (PixFmt == AV_PIX_FMT_NV12)
|
if (PixFmt == AV_PIX_FMT_NV12)
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0,n==0?GL_R8 :GL_RG8 ,n==0?size_x:size_x/2, n==0?size_y:size_y/2, 0, n==0?GL_RED:GL_RG , GL_UNSIGNED_BYTE , NULL);
|
glTexImage2D(GL_TEXTURE_2D, 0, n == 0 ? GL_R8 : GL_RG8, n == 0 ? size_x : size_x / 2,
|
||||||
|
n == 0 ? size_y : size_y / 2, 0, n == 0 ? GL_RED : GL_RG, GL_UNSIGNED_BYTE, NULL);
|
||||||
else
|
else
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0,n==0?GL_R16:GL_RG16 ,n==0?size_x:size_x/2, n==0?size_y:size_y/2, 0, n==0?GL_RED:GL_RG , GL_UNSIGNED_SHORT, NULL);
|
glTexImage2D(GL_TEXTURE_2D, 0, n == 0 ? GL_R16 : GL_RG16, n == 0 ? size_x : size_x / 2,
|
||||||
|
n == 0 ? size_y : size_y / 2, 0, n == 0 ? GL_RED : GL_RG, GL_UNSIGNED_SHORT, NULL);
|
||||||
#endif
|
#endif
|
||||||
SDK_CHECK_ERROR_GL();
|
SDK_CHECK_ERROR_GL();
|
||||||
// register this texture with CUDA
|
// register this texture with CUDA
|
||||||
@ -2489,27 +2463,31 @@ void createTextureDst(CuvidDecoder * decoder, int anz, unsigned int size_x, unsi
|
|||||||
desc.layers[n].pitch[plane]); \
|
desc.layers[n].pitch[plane]); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
void generateVAAPIImage(CuvidDecoder * decoder, VASurfaceID index, const AVFrame * frame, int image_width, int image_height)
|
void generateVAAPIImage(CuvidDecoder * decoder, VASurfaceID index, const AVFrame * frame, int image_width,
|
||||||
|
int image_height)
|
||||||
{
|
{
|
||||||
VAStatus status;
|
VAStatus status;
|
||||||
|
|
||||||
uint64_t first_time;
|
uint64_t first_time;
|
||||||
|
|
||||||
#if defined (VAAPI) && !defined (RASPI)
|
#if defined (VAAPI) && !defined (RASPI)
|
||||||
VADRMPRIMESurfaceDescriptor desc;
|
VADRMPRIMESurfaceDescriptor desc;
|
||||||
|
|
||||||
status =
|
status =
|
||||||
vaExportSurfaceHandle(decoder->VaDisplay, (VASurfaceID)(uintptr_t)frame->data[3], VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
|
vaExportSurfaceHandle(decoder->VaDisplay, (VASurfaceID) (uintptr_t) frame->data[3],
|
||||||
VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, &desc);
|
VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
|
||||||
|
&desc);
|
||||||
|
|
||||||
if (status != VA_STATUS_SUCCESS) {
|
if (status != VA_STATUS_SUCCESS) {
|
||||||
printf("Fehler beim export VAAPI Handle\n");
|
printf("Fehler beim export VAAPI Handle\n");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
vaSyncSurface(decoder->VaDisplay, (VASurfaceID)(uintptr_t)frame->data[3]);
|
vaSyncSurface(decoder->VaDisplay, (VASurfaceID) (uintptr_t) frame->data[3]);
|
||||||
#endif
|
#endif
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
AVDRMFrameDescriptor desc;
|
AVDRMFrameDescriptor desc;
|
||||||
memcpy(&desc,frame->data[0],sizeof(desc));
|
|
||||||
|
memcpy(&desc, frame->data[0], sizeof(desc));
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@ -2520,26 +2498,27 @@ void generateVAAPIImage(CuvidDecoder * decoder, VASurfaceID index, const AVFrame
|
|||||||
int attribs[20] = { EGL_NONE };
|
int attribs[20] = { EGL_NONE };
|
||||||
uint num_attribs = 0;
|
uint num_attribs = 0;
|
||||||
int fd;
|
int fd;
|
||||||
|
|
||||||
#if defined (VAAPI) && !defined (RASPI)
|
#if defined (VAAPI) && !defined (RASPI)
|
||||||
ADD_ATTRIB(EGL_LINUX_DRM_FOURCC_EXT, desc.layers[n].drm_format);
|
ADD_ATTRIB(EGL_LINUX_DRM_FOURCC_EXT, desc.layers[n].drm_format);
|
||||||
ADD_ATTRIB(EGL_WIDTH, n==0?image_width:image_width/2);
|
ADD_ATTRIB(EGL_WIDTH, n == 0 ? image_width : image_width / 2);
|
||||||
ADD_ATTRIB(EGL_HEIGHT, n==0?image_height:image_height/2);
|
ADD_ATTRIB(EGL_HEIGHT, n == 0 ? image_height : image_height / 2);
|
||||||
ADD_PLANE_ATTRIBS(0);
|
ADD_PLANE_ATTRIBS(0);
|
||||||
#endif
|
#endif
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
ADD_ATTRIB(EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_R8);
|
ADD_ATTRIB(EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_R8);
|
||||||
ADD_ATTRIB(EGL_WIDTH, n==0?image_width:image_width/2);
|
ADD_ATTRIB(EGL_WIDTH, n == 0 ? image_width : image_width / 2);
|
||||||
ADD_ATTRIB(EGL_HEIGHT, n==0?image_height:image_height/2);
|
ADD_ATTRIB(EGL_HEIGHT, n == 0 ? image_height : image_height / 2);
|
||||||
if (n==0) {
|
if (n == 0) {
|
||||||
fd = dup(desc.objects[0].fd);
|
fd = dup(desc.objects[0].fd);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_FD_EXT,fd);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_FD_EXT, fd);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_OFFSET_EXT,desc.layers[0].planes[n].offset);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_OFFSET_EXT, desc.layers[0].planes[n].offset);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_PITCH_EXT,desc.layers[0].planes[n].pitch);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_PITCH_EXT, desc.layers[0].planes[n].pitch);
|
||||||
} else {
|
} else {
|
||||||
fd = dup(desc.objects[0].fd);
|
fd = dup(desc.objects[0].fd);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_FD_EXT,fd);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_FD_EXT, fd);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_OFFSET_EXT,desc.layers[0].planes[n].offset);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_OFFSET_EXT, desc.layers[0].planes[n].offset);
|
||||||
ADD_ATTRIB( EGL_DMA_BUF_PLANE0_PITCH_EXT,desc.layers[0].planes[n].pitch);
|
ADD_ATTRIB(EGL_DMA_BUF_PLANE0_PITCH_EXT, desc.layers[0].planes[n].pitch);
|
||||||
}
|
}
|
||||||
// Debug(3,"n %d fd %d nb_planes %d nb_layers %d plane %d offeset %d offset2 %d pitch %d \n",n, fd,
|
// Debug(3,"n %d fd %d nb_planes %d nb_layers %d plane %d offeset %d offset2 %d pitch %d \n",n, fd,
|
||||||
// desc.layers[0].nb_planes,desc.nb_layers,n,desc.layers[0].planes[n].offset,desc.layers[0].planes[n+1].offset,desc.layers[0].planes[n].pitch);
|
// desc.layers[0].nb_planes,desc.nb_layers,n,desc.layers[0].planes[n].offset,desc.layers[0].planes[n+1].offset,desc.layers[0].planes[n].pitch);
|
||||||
@ -2554,10 +2533,10 @@ void generateVAAPIImage(CuvidDecoder * decoder, VASurfaceID index, const AVFrame
|
|||||||
glBindTexture(GL_TEXTURE_2D, decoder->gl_textures[index * Planes + n]);
|
glBindTexture(GL_TEXTURE_2D, decoder->gl_textures[index * Planes + n]);
|
||||||
EGLImageTargetTexture2DOES(GL_TEXTURE_2D, decoder->images[index * Planes + n]);
|
EGLImageTargetTexture2DOES(GL_TEXTURE_2D, decoder->images[index * Planes + n]);
|
||||||
#ifdef RASPI
|
#ifdef RASPI
|
||||||
decoder->fds[index*Planes+n] = fd;
|
decoder->fds[index * Planes + n] = fd;
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
decoder->fds[index*Planes] = desc.objects[0].fd;
|
decoder->fds[index * Planes] = desc.objects[0].fd;
|
||||||
glBindTexture(GL_TEXTURE_2D, 0);
|
glBindTexture(GL_TEXTURE_2D, 0);
|
||||||
eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
|
eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
|
||||||
EglCheck();
|
EglCheck();
|
||||||
@ -2610,7 +2589,6 @@ static unsigned CuvidGetVideoSurface(CuvidDecoder * decoder, const AVCodecContex
|
|||||||
#if defined (VAAPI) || defined (YADIF)
|
#if defined (VAAPI) || defined (YADIF)
|
||||||
static void CuvidSyncRenderFrame(CuvidDecoder * decoder, const AVCodecContext * video_ctx, AVFrame * frame);
|
static void CuvidSyncRenderFrame(CuvidDecoder * decoder, const AVCodecContext * video_ctx, AVFrame * frame);
|
||||||
|
|
||||||
|
|
||||||
int push_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * frame)
|
int push_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * frame)
|
||||||
{
|
{
|
||||||
|
|
||||||
@ -2622,9 +2600,9 @@ int push_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
|
|||||||
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
|
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
// printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
|
// printf("Interlaced %d tff %d\n",frame->interlaced_frame,frame->top_field_first);
|
||||||
/* pull filtered frames from the filtergraph */
|
/* pull filtered frames from the filtergraph */
|
||||||
while ((ret = av_buffersink_get_frame(decoder->buffersink_ctx, filt_frame)) >= 0 ) {
|
while ((ret = av_buffersink_get_frame(decoder->buffersink_ctx, filt_frame)) >= 0) {
|
||||||
filt_frame->pts /= 2;
|
filt_frame->pts /= 2;
|
||||||
decoder->Interlaced = 0;
|
decoder->Interlaced = 0;
|
||||||
// printf("vaapideint video:new %#012" PRIx64 " old %#012" PRIx64 "\n",filt_frame->pts,frame->pts);
|
// printf("vaapideint video:new %#012" PRIx64 " old %#012" PRIx64 "\n",filt_frame->pts,frame->pts);
|
||||||
@@ -2642,7 +2620,7 @@ int init_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
     enum AVPixelFormat format = PIXEL_FORMAT;
 
 #ifdef VAAPI
-    const char *filters_descr = "deinterlace_vaapi=rate=field:auto=1";  //
+    const char *filters_descr = "deinterlace_vaapi=rate=field:auto=1";
 #endif
 #ifdef YADIF
     const char *filters_descr = "yadif_cuda=1:0:1";     // mode=send_field,parity=tff,deint=interlaced";
@@ -2751,7 +2729,7 @@ int init_filters(AVCodecContext * dec_ctx, CuvidDecoder * decoder, AVFrame * fra
 #endif
 
 #ifdef VAAPI
-static int init_generic_hwaccel(CuvidDecoder * decoder, enum AVPixelFormat hw_fmt,AVCodecContext * video_ctx)
+static int init_generic_hwaccel(CuvidDecoder * decoder, enum AVPixelFormat hw_fmt, AVCodecContext * video_ctx)
 {
 
     AVBufferRef *new_frames_ctx = NULL;
@@ -2761,9 +2739,7 @@ static int init_generic_hwaccel(CuvidDecoder * decoder, enum AVPixelFormat hw_fm
         goto error;
     }
 
-    if (avcodec_get_hw_frames_parameters(video_ctx,
-            hw_device_ctx, hw_fmt, &new_frames_ctx) < 0)
-    {
+    if (avcodec_get_hw_frames_parameters(video_ctx, hw_device_ctx, hw_fmt, &new_frames_ctx) < 0) {
         Debug(3, "Hardware decoding of this stream is unsupported?\n");
         goto error;
     }
@@ -2773,17 +2749,14 @@ static int init_generic_hwaccel(CuvidDecoder * decoder, enum AVPixelFormat hw_fm
     // We might be able to reuse a previously allocated frame pool.
     if (decoder->cached_hw_frames_ctx) {
         AVHWFramesContext *old_fctx = (void *)decoder->cached_hw_frames_ctx->data;
-        Debug(3,"CMP %d:%d %d:%d %d:%d %d:%d %d:%d\,",new_fctx->format, old_fctx->format,
-            new_fctx->sw_format,old_fctx->sw_format ,
-            new_fctx->width, old_fctx->width ,
-            new_fctx->height, old_fctx->height ,
-            new_fctx->initial_pool_size, old_fctx->initial_pool_size);
-        if (new_fctx->format != old_fctx->format ||
-            new_fctx->sw_format != old_fctx->sw_format ||
-            new_fctx->width != old_fctx->width ||
-            new_fctx->height != old_fctx->height ||
-            new_fctx->initial_pool_size != old_fctx->initial_pool_size) {
-            Debug(3,"delete old cache");
+        Debug(3, "CMP %d:%d %d:%d %d:%d %d:%d %d:%d\,", new_fctx->format, old_fctx->format, new_fctx->sw_format,
+            old_fctx->sw_format, new_fctx->width, old_fctx->width, new_fctx->height, old_fctx->height,
+            new_fctx->initial_pool_size, old_fctx->initial_pool_size);
+        if (new_fctx->format != old_fctx->format || new_fctx->sw_format != old_fctx->sw_format
+            || new_fctx->width != old_fctx->width || new_fctx->height != old_fctx->height
+            || new_fctx->initial_pool_size != old_fctx->initial_pool_size) {
+            Debug(3, "delete old cache");
             if (decoder->filter_graph)
                 avfilter_graph_free(&decoder->filter_graph);
             av_buffer_unref(&decoder->cached_hw_frames_ctx);
@@ -2808,8 +2781,8 @@ static int init_generic_hwaccel(CuvidDecoder * decoder, enum AVPixelFormat hw_fm
     av_buffer_unref(&new_frames_ctx);
     return 0;
 
   error:
-    Debug(3,"Error with hwframes\n");
+    Debug(3, "Error with hwframes\n");
     av_buffer_unref(&new_frames_ctx);
     av_buffer_unref(&decoder->cached_hw_frames_ctx);
     return -1;
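The pool-reuse logic that these hunks reformat follows a common FFmpeg hwaccel pattern: query the frame-pool parameters the codec would need, compare them with a cached AVHWFramesContext, and only reallocate when something relevant changed. A rough sketch of that pattern under those assumptions (simplified, not the plugin's exact code):

    // Sketch: reuse a cached hw frames context when the stream parameters are unchanged.
    #include <libavcodec/avcodec.h>
    #include <libavutil/hwcontext.h>

    static int ensure_hw_frames(AVCodecContext *avctx, AVBufferRef *hw_device_ctx,
                                enum AVPixelFormat hw_fmt, AVBufferRef **cached)
    {
        AVBufferRef *fresh = NULL;

        if (avcodec_get_hw_frames_parameters(avctx, hw_device_ctx, hw_fmt, &fresh) < 0)
            return -1;                      // hwaccel not usable for this stream

        AVHWFramesContext *n = (AVHWFramesContext *)fresh->data;

        if (*cached) {
            AVHWFramesContext *o = (AVHWFramesContext *)(*cached)->data;
            // Only geometry, formats and pool size matter for reuse.
            if (n->format == o->format && n->sw_format == o->sw_format &&
                n->width == o->width && n->height == o->height &&
                n->initial_pool_size == o->initial_pool_size) {
                av_buffer_unref(&fresh);    // keep the old pool
                avctx->hw_frames_ctx = av_buffer_ref(*cached);
                return 0;
            }
            av_buffer_unref(cached);        // parameters changed: drop the old pool
        }

        if (av_hwframe_ctx_init(fresh) < 0) {
            av_buffer_unref(&fresh);
            return -1;
        }
        *cached = fresh;
        avctx->hw_frames_ctx = av_buffer_ref(fresh);
        return 0;
    }

Reusing the pool avoids re-allocating GPU surfaces on every channel switch that keeps the same resolution and pixel format.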
@@ -2867,9 +2840,9 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex
         Fatal(_("video: no valid profile found\n"));
     }
 
     // decoder->newchannel = 1;
 #ifdef VAAPI
-    init_generic_hwaccel(decoder, PIXEL_FORMAT,video_ctx);
+    init_generic_hwaccel(decoder, PIXEL_FORMAT, video_ctx);
 #endif
     if (ist->GetFormatDone) {
         return PIXEL_FORMAT;
@@ -2892,8 +2865,8 @@ static enum AVPixelFormat Cuvid_get_format(CuvidDecoder * decoder, AVCodecContex
         ist->hwaccel_output_format = AV_PIX_FMT_NV12;
     }
 
-    if ((video_ctx->width != decoder->InputWidth
-        || video_ctx->height != decoder->InputHeight) && decoder->TrickSpeed == 0) {
+    if ((video_ctx->width != decoder->InputWidth || video_ctx->height != decoder->InputHeight)
+        && decoder->TrickSpeed == 0) {
 
        // if (decoder->TrickSpeed == 0) {
 #ifdef PLACEBO
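Cuvid_get_format implements FFmpeg's AVCodecContext.get_format callback; the hunks above only touch its layout. As a reminder of the contract, a minimal generic callback that picks a hardware pixel format from the list the decoder offers could look like this (HW_PIX_FMT stands for whatever the build selects, e.g. AV_PIX_FMT_CUDA or AV_PIX_FMT_VAAPI; the name get_hw_format is invented here):

    // Sketch of a get_format callback: return the hardware format if it is offered,
    // otherwise fall back to the first software format in the list.
    #include <libavcodec/avcodec.h>

    #define HW_PIX_FMT AV_PIX_FMT_VAAPI    /* assumption: the build's hw format */

    static enum AVPixelFormat get_hw_format(AVCodecContext *avctx, const enum AVPixelFormat *fmts)
    {
        for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
            if (*p == HW_PIX_FMT) {
                // This is the point where the frames context / hwaccel gets set up.
                return *p;
            }
        }
        return fmts[0];                    // no hw format offered: software decode
    }

    // Installed once after avcodec_alloc_context3():
    //   video_ctx->get_format = get_hw_format;

The plugin's version additionally re-initialises its surfaces when the stream resolution changes, which is what the InputWidth/InputHeight comparison above guards.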
@@ -3383,7 +3356,8 @@ static void CuvidRenderFrame(CuvidDecoder * decoder, const AVCodecContext * vide
             video_ctx->width != decoder->InputWidth
             // || decoder->ColorSpace != color
             || video_ctx->height != decoder->InputHeight) {
-            Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->colorspace ,video_ctx->width, decoder->InputWidth,video_ctx->height, decoder->InputHeight);
+            Debug(3, "fmt %02d:%02d width %d:%d hight %d:%d\n", decoder->ColorSpace, frame->colorspace, video_ctx->width,
+                decoder->InputWidth, video_ctx->height, decoder->InputHeight);
             decoder->PixFmt = AV_PIX_FMT_NV12;
             decoder->InputWidth = video_ctx->width;
             decoder->InputHeight = video_ctx->height;
@@ -3465,7 +3439,7 @@ Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->co
 
         }
 
         // Debug(3,"video/cuvid: pixel format %d not supported\n", video_ctx->pix_fmt);
         av_frame_free(&frame);
         return;
     }
@@ -3477,7 +3451,7 @@ Debug(3,"fmt %02d:%02d width %d:%d hight %d:%d\n",decoder->ColorSpace,frame->co
 ///
 static void *CuvidGetHwAccelContext(CuvidDecoder * decoder)
 {
-    unsigned int version,ret;
+    unsigned int version, ret;
 
     Debug(3, "Initializing cuvid hwaccel thread ID:%ld\n", (long int)syscall(186));
     // turn NULL;
@@ -3629,13 +3603,13 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
 #ifdef USE_DRM
     if (!decoder->Closing) {
         frame = decoder->frames[current];
-        AVFrameSideData *sd1 = av_frame_get_side_data (frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
-        AVFrameSideData *sd2 = av_frame_get_side_data (frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
-        set_hdr_metadata(frame->color_primaries,frame->color_trc,sd1,sd2);
+        AVFrameSideData *sd1 = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
+        AVFrameSideData *sd2 = av_frame_get_side_data(frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
+        set_hdr_metadata(frame->color_primaries, frame->color_trc, sd1, sd2);
     }
 #endif
 
 
     // Render Progressive frame
 #ifndef PLACEBO
     y = VideoWindowHeight - decoder->OutputY - decoder->OutputHeight;
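The reindented block above feeds the frame's HDR metadata to the DRM output path. For readers who have not used these side-data types, here is a small sketch of extracting them with FFmpeg; the printout is illustrative, and set_hdr_metadata itself is plugin code that is not reproduced here.

    // Sketch: read mastering-display and content-light-level side data from a frame.
    #include <stdio.h>
    #include <libavutil/frame.h>
    #include <libavutil/mastering_display_metadata.h>

    static void dump_hdr_info(const AVFrame *frame)
    {
        AVFrameSideData *sd;

        sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
        if (sd) {
            const AVMasteringDisplayMetadata *m = (const AVMasteringDisplayMetadata *)sd->data;
            if (m->has_luminance)
                printf("mastering luminance %.4f .. %.1f cd/m2\n",
                    av_q2d(m->min_luminance), av_q2d(m->max_luminance));
        }

        sd = av_frame_get_side_data(frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
        if (sd) {
            const AVContentLightMetadata *c = (const AVContentLightMetadata *)sd->data;
            printf("MaxCLL %u MaxFALL %u\n", c->MaxCLL, c->MaxFALL);
        }
    }

On DRM, this information ends up in the connector's HDR_OUTPUT_METADATA property together with the frame's color primaries and transfer characteristics.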
@@ -3809,7 +3783,6 @@ static void CuvidMixVideo(CuvidDecoder * decoder, __attribute__((unused))
 
     decoder->newchannel = 0;
 
-
     if (!pl_render_image(p->renderer, &decoder->pl_images[current], target, &render_params)) {
         Debug(3, "Failed rendering frame!\n");
     }
@@ -3849,7 +3822,6 @@ void make_osd_overlay(int x, int y, int width, int height)
     const struct pl_fmt *fmt;
     struct pl_overlay *pl;
 
-
     int offset = VideoWindowHeight - (VideoWindowHeight - height - y) - (VideoWindowHeight - y);
 
     fmt = pl_find_named_fmt(p->gpu, "rgba8");   // 8 Bit RGB
@@ -4275,7 +4247,8 @@ static void CuvidSetTrickSpeed(CuvidDecoder * decoder, int speed)
 /// @param[out] dropped    dropped frames
 /// @param[out] count      number of decoded frames
 ///
-void CuvidGetStats(CuvidDecoder * decoder, int *missed, int *duped, int *dropped, int *counter, float *frametime, int *width, int *height, int *color, int * eotf)
+void CuvidGetStats(CuvidDecoder * decoder, int *missed, int *duped, int *dropped, int *counter, float *frametime,
+    int *width, int *height, int *color, int *eotf)
 {
     *missed = decoder->FramesMissed;
     *duped = decoder->FramesDuped;
@@ -4307,13 +4280,12 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
     int64_t audio_clock;
     int64_t video_clock;
     int err = 0;
-    static int speedup=3;
+    static int speedup = 3;
 
 #ifdef GAMMA
     Get_Gamma();
 #endif
 
-
     video_clock = CuvidGetClock(decoder);
 
     filled = atomic_read(&decoder->SurfacesFilled);
@@ -4373,7 +4345,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
     }
 #endif
     if (abs(diff) > 5000 * 90) {        // more than 5s
-        err = CuvidMessage(2, "video: audio/video difference too big %d\n",diff/90);
+        err = CuvidMessage(2, "video: audio/video difference too big %d\n", diff / 90);
         // decoder->SyncCounter = 1;
         // usleep(10);
         goto skip_sync;
@@ -4403,8 +4375,7 @@ static void CuvidSyncDecoder(CuvidDecoder * decoder)
                 AudioDelayms(abs(diff / 90));
             }
             decoder->SyncCounter = 1;
-        }
-        else {
+        } else {
             speedup = 2;
         }
 #if defined(DEBUG) || defined(AV_INFO)
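The sync checks above work on PTS differences expressed in 90 kHz MPEG clock ticks, which is why thresholds appear as milliseconds times 90 and log output divides by 90. A compact illustration of that bookkeeping; the 5000 ms threshold mirrors the hunk above, while the helper names are made up for this sketch.

    // Sketch: audio/video drift bookkeeping in 90 kHz PTS ticks.
    #include <stdint.h>
    #include <stdlib.h>

    #define PTS_PER_MS  90                  // the MPEG system clock runs at 90 kHz
    #define RESYNC_MS   5000                // treat more than 5 s of drift as hopeless

    // Drift in milliseconds; positive means video is ahead of audio.
    static int drift_ms(int64_t video_pts, int64_t audio_pts)
    {
        return (int)((video_pts - audio_pts) / PTS_PER_MS);
    }

    static int needs_hard_resync(int64_t video_pts, int64_t audio_pts)
    {
        return abs(drift_ms(video_pts, audio_pts)) > RESYNC_MS;
    }

Below that hard limit the real code nudges the clocks instead, by duplicating or dropping frames and by delaying audio with AudioDelayms().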
@@ -4575,7 +4546,6 @@ static void CuvidDisplayHandlerThread(void)
     allfull = 1;
     decoded = 0;
 
-
     for (i = 0; i < CuvidDecoderN; ++i) {
 
         decoder = CuvidDecoders[i];
@@ -4669,14 +4639,14 @@ static const VideoModule CuvidModule = {
     .RenderFrame = (void (*const) (VideoHwDecoder *,
            const AVCodecContext *, const AVFrame *))CuvidSyncRenderFrame,
     .GetHwAccelContext = (void *(*const)(VideoHwDecoder *))CuvidGetHwAccelContext,
-    .SetClock = (void (*const)(VideoHwDecoder *, int64_t))CuvidSetClock,
+    .SetClock = (void(*const)(VideoHwDecoder *, int64_t))CuvidSetClock,
     .GetClock = (int64_t(*const)(const VideoHwDecoder *))CuvidGetClock,
-    .SetClosing = (void (*const)(const VideoHwDecoder *))CuvidSetClosing,
-    .ResetStart = (void (*const)(const VideoHwDecoder *))CuvidResetStart,
-    .SetTrickSpeed = (void (*const)(const VideoHwDecoder *, int))CuvidSetTrickSpeed,
+    .SetClosing = (void(*const)(const VideoHwDecoder *))CuvidSetClosing,
+    .ResetStart = (void(*const)(const VideoHwDecoder *))CuvidResetStart,
+    .SetTrickSpeed = (void(*const)(const VideoHwDecoder *, int))CuvidSetTrickSpeed,
     .GrabOutput = CuvidGrabOutputSurface,
-    .GetStats = (void (*const)(VideoHwDecoder *, int *, int *, int *,
-        int *, float *, int *, int *, int * , int *))CuvidGetStats,
+    .GetStats = (void(*const)(VideoHwDecoder *, int *, int *, int *,
+        int *, float *, int *, int *, int *, int *))CuvidGetStats,
     .SetBackground = CuvidSetBackground,
     .SetVideoMode = CuvidSetVideoMode,
 
@@ -4837,14 +4807,14 @@ static const VideoModule NoopModule = {
            const AVCodecContext *, const AVFrame *))NoopSyncRenderFrame,
     .GetHwAccelContext = (void *(*const)(VideoHwDecoder *))
        DummyGetHwAccelContext,
-    .SetClock = (void (*const)(VideoHwDecoder *, int64_t))NoopSetClock,
+    .SetClock = (void(*const)(VideoHwDecoder *, int64_t))NoopSetClock,
     .GetClock = (int64_t(*const)(const VideoHwDecoder *))NoopGetClock,
-    .SetClosing = (void (*const)(const VideoHwDecoder *))NoopSetClosing,
-    .ResetStart = (void (*const)(const VideoHwDecoder *))NoopResetStart,
-    .SetTrickSpeed =(void (*const)(const VideoHwDecoder *, int))NoopSetTrickSpeed,
+    .SetClosing = (void(*const)(const VideoHwDecoder *))NoopSetClosing,
+    .ResetStart = (void(*const)(const VideoHwDecoder *))NoopResetStart,
+    .SetTrickSpeed = (void(*const)(const VideoHwDecoder *, int))NoopSetTrickSpeed,
     .GrabOutput = NoopGrabOutputSurface,
-    .GetStats = (void (*const)(VideoHwDecoder *, int *, int *, int *,
-        int *, float *, int *, int *, int * , int *))NoopGetStats,
+    .GetStats = (void(*const)(VideoHwDecoder *, int *, int *, int *,
+        int *, float *, int *, int *, int *, int *))NoopGetStats,
 #endif
     .SetBackground = NoopSetBackground,
     .SetVideoMode = NoopVoid,
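Both tables are instances of the plugin's VideoModule pattern: a struct of const function pointers so the CUVID, VAAPI and noop back ends can be swapped behind one interface. A stripped-down sketch of the idea; the three-field Backend struct here is invented for illustration, the real VideoModule has many more entries.

    // Sketch: selecting a rendering back end through a table of function pointers.
    #include <stdio.h>

    typedef struct {
        const char *name;
        void (*const Init)(void);
        void (*const RenderFrame)(const void *frame);
    } Backend;

    static void NoopInit(void) { }
    static void NoopRender(const void *frame) { (void)frame; }

    static const Backend NoopBackend = {
        .name = "noop",
        .Init = NoopInit,
        .RenderFrame = NoopRender,
    };

    // At startup one table (cuvid, vaapi, noop) is chosen and kept as a pointer;
    // every later call goes through that pointer, exactly like VideoUsedModule below.
    static const Backend *UsedBackend = &NoopBackend;

    int main(void)
    {
        UsedBackend->Init();
        UsedBackend->RenderFrame(NULL);
        printf("using %s back end\n", UsedBackend->name);
        return 0;
    }

The casts in the real tables only exist to adapt the concrete decoder type to the opaque VideoHwDecoder used by the interface.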
@@ -4999,7 +4969,7 @@ void VideoOsdInit(void)
 
     if (posd)
         free(posd);
-    posd = (unsigned char *)calloc( (OsdWidth+1) * (OsdHeight+1) * 4, 1 );
+    posd = (unsigned char *)calloc((OsdWidth + 1) * (OsdHeight + 1) * 4, 1);
     VideoOsdClear();
 }
 
@@ -5199,8 +5169,6 @@ void pl_log_intern(void *stream, enum pl_log_level level, const char *msg)
     printf("%5s: %s\n", prefix[level], msg);
 }
 
-
-
 void InitPlacebo()
 {
 
@@ -5211,7 +5179,7 @@ void InitPlacebo()
     char xcbext[] = { "VK_KHR_xcb_surface" };
     char surfext[] = { "VK_KHR_surface" };
 
-    Debug(3, "Init Placebo mit API %d\n",PL_API_VER);
+    Debug(3, "Init Placebo mit API %d\n", PL_API_VER);
 
     p = calloc(1, sizeof(struct priv));
     if (!p)
@@ -5300,8 +5268,9 @@ void InitPlacebo()
 /// Video render thread.
 ///
 
-void delete_decode() {
-    Debug(3,"decoder thread exit\n");
+void delete_decode()
+{
+    Debug(3, "decoder thread exit\n");
 }
 
 static void *VideoDisplayHandlerThread(void *dummy)
@@ -5364,7 +5333,7 @@ void exit_display()
     eglThreadContext = NULL;
 }
 #endif
-    Debug(3,"display thread exit\n");
+    Debug(3, "display thread exit\n");
 
 }
 
@@ -5379,7 +5348,7 @@ static void *VideoHandlerThread(void *dummy)
 
 #ifdef GAMMA
     Init_Gamma();
-    Set_Gamma(0.0,6500);
+    Set_Gamma(0.0, 6500);
 #endif
 
 #ifdef PLACEBO
@@ -5455,20 +5424,20 @@ static void VideoThreadExit(void)
 
         // FIXME: can't cancel locked
         if (pthread_cancel(VideoThread)) {
-            Debug(3,"video: can't queue cancel video display thread\n");
+            Debug(3, "video: can't queue cancel video display thread\n");
         }
         usleep(200000);                 // 200ms
         if (pthread_join(VideoThread, &retval) || retval != PTHREAD_CANCELED) {
-            Debug(3,"video: can't cancel video decoder thread\n");
+            Debug(3, "video: can't cancel video decoder thread\n");
         }
 
         if (VideoDisplayThread) {
             if (pthread_cancel(VideoDisplayThread)) {
-                Debug(3,"video: can't queue cancel video display thread\n");
+                Debug(3, "video: can't queue cancel video display thread\n");
             }
             usleep(200000);             // 200ms
             if (pthread_join(VideoDisplayThread, &retval) || retval != PTHREAD_CANCELED) {
-                Debug(3,"video: can't cancel video display thread\n");
+                Debug(3, "video: can't cancel video display thread\n");
             }
             VideoDisplayThread = 0;
         }
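The shutdown sequence above is the classic cancel-then-join pattern: request cancellation, give the thread a moment to reach a cancellation point, then join and verify the PTHREAD_CANCELED return value. A minimal standalone version of that pattern, with a made-up worker thread:

    // Sketch: cancelling and joining a worker thread and checking the join result.
    #include <pthread.h>
    #include <stdio.h>
    #include <unistd.h>

    static void *worker(void *arg)
    {
        (void)arg;
        for (;;)
            usleep(10000);              // usleep() is a cancellation point
        return NULL;
    }

    int main(void)
    {
        pthread_t tid;
        void *retval;

        pthread_create(&tid, NULL, worker, NULL);
        usleep(100000);

        if (pthread_cancel(tid))
            fprintf(stderr, "can't queue cancel for worker\n");
        if (pthread_join(tid, &retval) || retval != PTHREAD_CANCELED)
            fprintf(stderr, "worker did not terminate via cancellation\n");
        return 0;
    }

The FIXME in the plugin notes the known weakness of this approach: a thread that is blocked while holding a lock cannot be cancelled cleanly.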
@@ -5885,9 +5854,10 @@ uint8_t *VideoGrabService(int *size, int *width, int *height)
 /// @param[out] dropped    dropped frames
 /// @param[out] count      number of decoded frames
 ///
-void VideoGetStats(VideoHwDecoder * hw_decoder, int *missed, int *duped, int *dropped, int *counter, float *frametime, int *width, int *height, int *color, int *eotf)
+void VideoGetStats(VideoHwDecoder * hw_decoder, int *missed, int *duped, int *dropped, int *counter, float *frametime,
+    int *width, int *height, int *color, int *eotf)
 {
-    VideoUsedModule->GetStats(hw_decoder, missed, duped, dropped, counter, frametime, width, height , color, eotf);
+    VideoUsedModule->GetStats(hw_decoder, missed, duped, dropped, counter, frametime, width, height, color, eotf);
 }
 
 ///
@@ -6163,8 +6133,7 @@ void VideoSetDevice(const char *device)
     VideoDriverName = device;
 }
 
-void VideoSetConnector( char *c)
+void VideoSetConnector(char *c)
 {
     DRMConnector = c;
 }
@@ -6517,13 +6486,13 @@ void VideoSetDeinterlace(int mode[VideoResolutionMax])
     VideoDeinterlace[1] = 1;            //mode[1];  // 720p
     VideoDeinterlace[2] = mode[2];      // fake 1080
     VideoDeinterlace[3] = mode[3];      // 1080
-    VideoDeinterlace[4] = 1,            //mode[4];  2160p
+    VideoDeinterlace[4] = 1;            //mode[4];  2160p
 #else
     VideoDeinterlace[0] = 1;            // 576i
     VideoDeinterlace[1] = 0;            //mode[1];  // 720p
     VideoDeinterlace[2] = 1;            // fake 1080
     VideoDeinterlace[3] = 1;            // 1080
-    VideoDeinterlace[4] = 0,            //mode[4];  2160p
+    VideoDeinterlace[4] = 0;            //mode[4];  2160p
 #endif
     VideoSurfaceModesChanged = 1;
 }
@@ -6634,7 +6603,7 @@ void VideoSetStudioLevels(int onoff)
 {
     VideoStudioLevels = onoff;
 #ifdef GAMMA
-    Set_Gamma(2.4,6500);
+    Set_Gamma(2.4, 6500);
 #endif
 
 }
@@ -6865,7 +6834,7 @@ void VideoInit(const char *display_name)
 ///
 void VideoExit(void)
 {
-    Debug(3,"Video Exit\n");
+    Debug(3, "Video Exit\n");
 #ifndef USE_DRM
     if (!XlibDisplay) {                 // no init or failed
         return;
@@ -6927,7 +6896,8 @@ void VideoExit(void)
 }
 
 #ifdef USE_DRM
-int GlxInitopengl () {
+int GlxInitopengl()
+{
     EGLint contextAttrs[] = {
         EGL_CONTEXT_CLIENT_VERSION, 3,
         EGL_NONE
@@ -6947,13 +6917,15 @@ int GlxInitopengl () {
     return;
 }
 
-int GlxDrawopengl () {
+int GlxDrawopengl()
+{
     eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglSharedContext);
     return;
 }
 
-void GlxDestroy() {
-    eglDestroyContext (eglDisplay, eglOSDContext);
+void GlxDestroy()
+{
+    eglDestroyContext(eglDisplay, eglOSDContext);
     eglOSDContext = NULL;
 }
 
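GlxInitopengl and GlxDrawopengl manage a second EGL context that shares objects with the render context, so the OSD thread can draw into the same textures. A rough sketch of creating and switching to such a shared context; the GLES 3 attribute list mirrors the hunk above, the function names and the surfaceless binding are assumptions for this sketch, and error handling is trimmed.

    // Sketch: create an EGL context that shares objects with an existing one,
    // then bind it surfaceless in the OSD thread.
    #include <EGL/egl.h>

    static EGLContext create_shared_context(EGLDisplay dpy, EGLConfig cfg, EGLContext share)
    {
        static const EGLint ctx_attrs[] = {
            EGL_CONTEXT_CLIENT_VERSION, 3,
            EGL_NONE
        };
        // Passing the render context as "share" makes textures and buffers
        // created in either context visible in both.
        return eglCreateContext(dpy, cfg, share, ctx_attrs);
    }

    static void bind_for_osd(EGLDisplay dpy, EGLContext osd_ctx)
    {
        // Surfaceless binding: rendering goes to FBOs / shared textures, not a window.
        eglMakeCurrent(dpy, EGL_NO_SURFACE, EGL_NO_SURFACE, osd_ctx);
    }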
@@ -6963,24 +6935,25 @@ void GlxDestroy() {
 #include <sys/stat.h>
 extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);
 
-void makejpg(uint8_t *data, int width, int height) {
-    static int count=0;
-    int i,n=0,gpu=0;;
-    char buf[32],FileName[32];
+void makejpg(uint8_t * data, int width, int height)
+{
+    static int count = 0;
+    int i, n = 0, gpu = 0;;
+    char buf[32], FileName[32];
     uint8_t *rgb;
     uint8_t *jpg_image;
-    int size,size1;
+    int size, size1;
 
     if (data == NULL) {
-        data = malloc(width*height*4);
-        gpu=1;
+        data = malloc(width * height * 4);
+        gpu = 1;
         glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
         glPixelStorei(GL_PACK_ALIGNMENT, 1);
        glReadPixels(0, 0, width, height, GL_BGRA, GL_UNSIGNED_BYTE, data);
     }
 
     // n = snprintf(buf, sizeof(buf), "P6\n%d\n%d\n255\n", width, height);
-    sprintf(FileName,"/tmp/test%d.jpg",count++);
+    sprintf(FileName, "/tmp/test%d.jpg", count++);
 
     rgb = malloc(width * height * 3 + n);
     if (!rgb) {
@@ -7001,7 +6974,8 @@ void makejpg(uint8_t *data, int width, int height) {
 
     jpg_image = CreateJpeg(rgb, &size1, 90, width, height);
     int fd = open(FileName, O_WRONLY | O_CREAT | O_NOFOLLOW | O_TRUNC, DEFFILEMODE);
-    write(fd,jpg_image,size1);
+    write(fd, jpg_image, size1);
     close(fd);
     free(rgb);
 }
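makejpg grabs the current GL back buffer as BGRA, repacks it to RGB and hands it to the plugin's CreateJpeg helper. For reference, here is a self-contained sketch of the grab-and-repack half; the helper name grab_rgb is invented, and the JPEG encoding step is left out because it depends on the plugin's libjpeg wrapper.

    // Sketch: read the framebuffer as BGRA and repack it into a tightly packed RGB buffer.
    // Requires a current GL context; GL_BGRA readback is standard on desktop GL.
    #include <stdint.h>
    #include <stdlib.h>
    #include <GL/gl.h>

    static uint8_t *grab_rgb(int width, int height)
    {
        uint8_t *bgra = malloc((size_t)width * height * 4);
        uint8_t *rgb = malloc((size_t)width * height * 3);

        if (!bgra || !rgb) {
            free(bgra);
            free(rgb);
            return NULL;
        }
        glPixelStorei(GL_PACK_ALIGNMENT, 1);    // no row padding in the readback
        glReadPixels(0, 0, width, height, GL_BGRA, GL_UNSIGNED_BYTE, bgra);

        for (size_t i = 0, j = 0; i < (size_t)width * height * 4; i += 4, j += 3) {
            rgb[j + 0] = bgra[i + 2];           // R
            rgb[j + 1] = bgra[i + 1];           // G
            rgb[j + 2] = bgra[i + 0];           // B
        }
        free(bgra);
        return rgb;     // caller frees; rows come back bottom-up, as GL delivers them
    }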